1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
94 int cse_not_expected;
96 /* Nonzero to generate code for all the subroutines within an
97 expression before generating the upper levels of the expression.
98 Nowadays this is never zero. */
99 int do_preexpand_calls = 1;
101 /* Don't check memory usage, since code is being emitted to check memory
102 usage. Used when current_function_check_memory_usage is true, to avoid
103 infinite recursion. */
104 static int in_check_memory_usage;
106 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
107 static tree placeholder_list = 0;
109 /* This structure is used by move_by_pieces to describe the move to
110 be performed. */
111 struct move_by_pieces
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 rtx from;
118 rtx from_addr;
119 int autinc_from;
120 int explicit_inc_from;
121 unsigned HOST_WIDE_INT len;
122 HOST_WIDE_INT offset;
123 int reverse;
126 /* This structure is used by clear_by_pieces to describe the clear to
127 be performed. */
129 struct clear_by_pieces
131 rtx to;
132 rtx to_addr;
133 int autinc_to;
134 int explicit_inc_to;
135 unsigned HOST_WIDE_INT len;
136 HOST_WIDE_INT offset;
137 int reverse;
140 extern struct obstack permanent_obstack;
142 static rtx get_push_address PARAMS ((int));
144 static rtx enqueue_insn PARAMS ((rtx, rtx));
145 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
146 PARAMS ((unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
149 struct move_by_pieces *));
150 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
151 unsigned int));
152 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
153 enum machine_mode,
154 struct clear_by_pieces *));
155 static rtx get_subtarget PARAMS ((rtx));
156 static int is_zeros_p PARAMS ((tree));
157 static int mostly_zeros_p PARAMS ((tree));
158 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, tree, unsigned int, int));
161 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
162 HOST_WIDE_INT));
163 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
164 HOST_WIDE_INT, enum machine_mode,
165 tree, enum machine_mode, int,
166 unsigned int, HOST_WIDE_INT, int));
167 static enum memory_use_mode
168 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
169 static tree save_noncopied_parts PARAMS ((tree, tree));
170 static tree init_noncopied_parts PARAMS ((tree, tree));
171 static int safe_from_p PARAMS ((rtx, tree, int));
172 static int fixed_type_p PARAMS ((tree));
173 static rtx var_rtx PARAMS ((tree));
174 static int readonly_fields_p PARAMS ((tree));
175 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
176 static rtx expand_increment PARAMS ((tree, int, int));
177 static void preexpand_calls PARAMS ((tree));
178 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
179 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
180 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
181 rtx, rtx));
182 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
184 /* Record for each mode whether we can move a register directly to or
185 from an object of that mode in memory. If we can't, we won't try
186 to use that mode directly when accessing a field of that mode. */
188 static char direct_load[NUM_MACHINE_MODES];
189 static char direct_store[NUM_MACHINE_MODES];
191 /* If a memory-to-memory move would take MOVE_RATIO or more simple
192 move-instruction sequences, we will do a movstr or libcall instead. */
194 #ifndef MOVE_RATIO
195 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 #define MOVE_RATIO 2
197 #else
198 /* If we are optimizing for space (-Os), cut down the default move ratio. */
199 #define MOVE_RATIO (optimize_size ? 3 : 15)
200 #endif
201 #endif
203 /* This macro is used to determine whether move_by_pieces should be called
204 to perform a structure copy. */
205 #ifndef MOVE_BY_PIECES_P
206 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
207 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
208 #endif
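/* Illustrative sketch (not part of the original source): how the two
   macros above combine.  A block copy of SIZE bytes at alignment ALIGN
   is expanded inline only while the estimated number of simple move
   insns stays below MOVE_RATIO; otherwise a movstr pattern or a
   library call is used.  The helper name below is hypothetical.  */
#if 0
static int
prefer_inline_block_copy_p (size, align)
     unsigned HOST_WIDE_INT size;
     unsigned int align;
{
  /* move_by_pieces_ninsns counts the word/half/byte moves that would be
     needed; MOVE_BY_PIECES_P compares that count against MOVE_RATIO.  */
  return MOVE_BY_PIECES_P (size, align);
}
#endif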
210 /* This array records the insn_code of insns to perform block moves. */
211 enum insn_code movstr_optab[NUM_MACHINE_MODES];
213 /* This array records the insn_code of insns to perform block clears. */
214 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
216 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
218 #ifndef SLOW_UNALIGNED_ACCESS
219 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
220 #endif
222 /* This is run once per compilation to set up which modes can be used
223 directly in memory and to initialize the block move optab. */
225 void
226 init_expr_once ()
228 rtx insn, pat;
229 enum machine_mode mode;
230 int num_clobbers;
231 rtx mem, mem1;
232 char *free_point;
234 start_sequence ();
236 /* Since we are on the permanent obstack, we must be sure we save this
237 spot AFTER we call start_sequence, since it will reuse the rtl it
238 makes. */
239 free_point = (char *) oballoc (0);
241 /* Try indexing by frame ptr and try by stack ptr.
242 It is known that on the Convex the stack ptr isn't a valid index.
243 With luck, one or the other is valid on any machine. */
244 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
245 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
247 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
248 pat = PATTERN (insn);
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
253 int regno;
254 rtx reg;
256 direct_load[(int) mode] = direct_store[(int) mode] = 0;
257 PUT_MODE (mem, mode);
258 PUT_MODE (mem1, mode);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
271 reg = gen_rtx_REG (mode, regno);
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
295 end_sequence ();
296 obfree (free_point);
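/* Illustrative note (not part of the original source): the direct_load
   and direct_store tables filled in above are consulted later, e.g. by
   convert_move and convert_modes, roughly as in this sketch:  */
#if 0
  if (GET_CODE (from) == MEM
      && ! MEM_VOLATILE_P (from)
      && direct_load[(int) to_mode]
      && ! mode_dependent_address_p (XEXP (from, 0)))
    /* FROM may be referenced directly in the narrower mode;
       otherwise it is first forced into a register.  */ ;
#endif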
299 /* This is run at the start of compiling a function. */
301 void
302 init_expr ()
304 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
306 pending_chain = 0;
307 pending_stack_adjust = 0;
308 stack_pointer_delta = 0;
309 inhibit_defer_pop = 0;
310 saveregs_value = 0;
311 apply_args_value = 0;
312 forced_labels = 0;
315 void
316 mark_expr_status (p)
317 struct expr_status *p;
319 if (p == NULL)
320 return;
322 ggc_mark_rtx (p->x_saveregs_value);
323 ggc_mark_rtx (p->x_apply_args_value);
324 ggc_mark_rtx (p->x_forced_labels);
327 void
328 free_expr_status (f)
329 struct function *f;
331 free (f->expr);
332 f->expr = NULL;
335 /* Small sanity check that the queue is empty at the end of a function. */
337 void
338 finish_expr_for_function ()
340 if (pending_chain)
341 abort ();
344 /* Manage the queue of increment instructions to be output
345 for POSTINCREMENT_EXPR expressions, etc. */
347 /* Queue up to increment (or change) VAR later. BODY says how:
348 BODY should be the same thing you would pass to emit_insn
349 to increment right away. It will go to emit_insn later on.
351 The value is a QUEUED expression to be used in place of VAR
352 where you want to guarantee the pre-incrementation value of VAR. */
354 static rtx
355 enqueue_insn (var, body)
356 rtx var, body;
358 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
359 body, pending_chain);
360 return pending_chain;
363 /* Use protect_from_queue to convert a QUEUED expression
364 into something that you can put immediately into an instruction.
365 If the queued incrementation has not happened yet,
366 protect_from_queue returns the variable itself.
367 If the incrementation has happened, protect_from_queue returns a temp
368 that contains a copy of the old value of the variable.
370 Any time an rtx which might possibly be a QUEUED is to be put
371 into an instruction, it must be passed through protect_from_queue first.
372 QUEUED expressions are not meaningful in instructions.
374 Do not pass a value through protect_from_queue and then hold
375 on to it for a while before putting it in an instruction!
376 If the queue is flushed in between, incorrect code will result. */
378 rtx
379 protect_from_queue (x, modify)
380 register rtx x;
381 int modify;
383 register RTX_CODE code = GET_CODE (x);
385 #if 0 /* A QUEUED can hang around after the queue is forced out. */
386 /* Shortcut for most common case. */
387 if (pending_chain == 0)
388 return x;
389 #endif
391 if (code != QUEUED)
393 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
394 use of autoincrement. Make a copy of the contents of the memory
395 location rather than a copy of the address, but not if the value is
396 of mode BLKmode. Don't modify X in place since it might be
397 shared. */
398 if (code == MEM && GET_MODE (x) != BLKmode
399 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
401 register rtx y = XEXP (x, 0);
402 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
404 MEM_COPY_ATTRIBUTES (new, x);
406 if (QUEUED_INSN (y))
408 register rtx temp = gen_reg_rtx (GET_MODE (new));
409 emit_insn_before (gen_move_insn (temp, new),
410 QUEUED_INSN (y));
411 return temp;
413 return new;
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
417 if (code == MEM)
419 rtx tem = protect_from_queue (XEXP (x, 0), 0);
420 if (tem != XEXP (x, 0))
422 x = copy_rtx (x);
423 XEXP (x, 0) = tem;
426 else if (code == PLUS || code == MULT)
428 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
429 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
430 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
432 x = copy_rtx (x);
433 XEXP (x, 0) = new0;
434 XEXP (x, 1) = new1;
437 return x;
439 /* If the increment has not happened, use the variable itself. */
440 if (QUEUED_INSN (x) == 0)
441 return QUEUED_VAR (x);
442 /* If the increment has happened and a pre-increment copy exists,
443 use that copy. */
444 if (QUEUED_COPY (x) != 0)
445 return QUEUED_COPY (x);
446 /* The increment has happened but we haven't set up a pre-increment copy.
447 Set one up now, and use it. */
448 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
449 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
450 QUEUED_INSN (x));
451 return QUEUED_COPY (x);
454 /* Return nonzero if X contains a QUEUED expression:
455 if it contains anything that will be altered by a queued increment.
456 We handle only combinations of MEM, PLUS, MINUS and MULT operators
457 since memory addresses generally contain only those. */
459 int
460 queued_subexp_p (x)
461 rtx x;
463 register enum rtx_code code = GET_CODE (x);
464 switch (code)
466 case QUEUED:
467 return 1;
468 case MEM:
469 return queued_subexp_p (XEXP (x, 0));
470 case MULT:
471 case PLUS:
472 case MINUS:
473 return (queued_subexp_p (XEXP (x, 0))
474 || queued_subexp_p (XEXP (x, 1)));
475 default:
476 return 0;
480 /* Perform all the pending incrementations. */
482 void
483 emit_queue ()
485 register rtx p;
486 while ((p = pending_chain))
488 rtx body = QUEUED_BODY (p);
490 if (GET_CODE (body) == SEQUENCE)
492 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
493 emit_insn (QUEUED_BODY (p));
495 else
496 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
497 pending_chain = QUEUED_NEXT (p);
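/* Illustrative sketch (not part of the original source): the usual life
   cycle of the queue managed above when expanding a post-increment such
   as VAR++.  The variable names are hypothetical.  */
#if 0
  {
    rtx var = /* rtx for the variable being post-incremented */ NULL_RTX;
    rtx q = enqueue_insn (var, gen_add2_insn (var, GEN_INT (1)));
    /* Before the increment has been emitted, protect_from_queue simply
       returns VAR; afterwards it returns a copy of the old value.  */
    rtx pre = protect_from_queue (q, 0);
    /* ... use PRE while expanding the containing expression ...  */
    emit_queue ();              /* flush the deferred increment */
  }
#endif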
501 /* Copy data from FROM to TO, where the machine modes are not the same.
502 Both modes may be integer, or both may be floating.
503 UNSIGNEDP should be nonzero if FROM is an unsigned type.
504 This causes zero-extension instead of sign-extension. */
506 void
507 convert_move (to, from, unsignedp)
508 register rtx to, from;
509 int unsignedp;
511 enum machine_mode to_mode = GET_MODE (to);
512 enum machine_mode from_mode = GET_MODE (from);
513 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
514 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
515 enum insn_code code;
516 rtx libcall;
518 /* rtx code for making an equivalent value. */
519 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
521 to = protect_from_queue (to, 1);
522 from = protect_from_queue (from, 0);
524 if (to_real != from_real)
525 abort ();
527 /* If FROM is a SUBREG that indicates that we have already done at least
528 the required extension, strip it. We don't handle such SUBREGs as
529 TO here. */
531 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
532 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
533 >= GET_MODE_SIZE (to_mode))
534 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
535 from = gen_lowpart (to_mode, from), from_mode = to_mode;
537 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
538 abort ();
540 if (to_mode == from_mode
541 || (from_mode == VOIDmode && CONSTANT_P (from)))
543 emit_move_insn (to, from);
544 return;
547 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
549 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
550 abort ();
552 if (VECTOR_MODE_P (to_mode))
553 from = gen_rtx_SUBREG (to_mode, from, 0);
554 else
555 to = gen_rtx_SUBREG (from_mode, to, 0);
557 emit_move_insn (to, from);
558 return;
561 if (to_real != from_real)
562 abort ();
564 if (to_real)
566 rtx value;
568 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
570 /* Try converting directly if the insn is supported. */
571 if ((code = can_extend_p (to_mode, from_mode, 0))
572 != CODE_FOR_nothing)
574 emit_unop_insn (code, to, from, UNKNOWN);
575 return;
579 #ifdef HAVE_trunchfqf2
580 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
583 return;
585 #endif
586 #ifdef HAVE_trunctqfqf2
587 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
590 return;
592 #endif
593 #ifdef HAVE_truncsfqf2
594 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
597 return;
599 #endif
600 #ifdef HAVE_truncdfqf2
601 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
604 return;
606 #endif
607 #ifdef HAVE_truncxfqf2
608 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
610 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
611 return;
613 #endif
614 #ifdef HAVE_trunctfqf2
615 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
617 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
618 return;
620 #endif
622 #ifdef HAVE_trunctqfhf2
623 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
626 return;
628 #endif
629 #ifdef HAVE_truncsfhf2
630 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
633 return;
635 #endif
636 #ifdef HAVE_truncdfhf2
637 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
640 return;
642 #endif
643 #ifdef HAVE_truncxfhf2
644 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
646 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
647 return;
649 #endif
650 #ifdef HAVE_trunctfhf2
651 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
653 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
654 return;
656 #endif
658 #ifdef HAVE_truncsftqf2
659 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
662 return;
664 #endif
665 #ifdef HAVE_truncdftqf2
666 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
669 return;
671 #endif
672 #ifdef HAVE_truncxftqf2
673 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
675 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
676 return;
678 #endif
679 #ifdef HAVE_trunctftqf2
680 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
682 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
683 return;
685 #endif
687 #ifdef HAVE_truncdfsf2
688 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
691 return;
693 #endif
694 #ifdef HAVE_truncxfsf2
695 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
697 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
698 return;
700 #endif
701 #ifdef HAVE_trunctfsf2
702 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
704 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
705 return;
707 #endif
708 #ifdef HAVE_truncxfdf2
709 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
711 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
712 return;
714 #endif
715 #ifdef HAVE_trunctfdf2
716 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
718 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
719 return;
721 #endif
723 libcall = (rtx) 0;
724 switch (from_mode)
726 case SFmode:
727 switch (to_mode)
729 case DFmode:
730 libcall = extendsfdf2_libfunc;
731 break;
733 case XFmode:
734 libcall = extendsfxf2_libfunc;
735 break;
737 case TFmode:
738 libcall = extendsftf2_libfunc;
739 break;
741 default:
742 break;
744 break;
746 case DFmode:
747 switch (to_mode)
749 case SFmode:
750 libcall = truncdfsf2_libfunc;
751 break;
753 case XFmode:
754 libcall = extenddfxf2_libfunc;
755 break;
757 case TFmode:
758 libcall = extenddftf2_libfunc;
759 break;
761 default:
762 break;
764 break;
766 case XFmode:
767 switch (to_mode)
769 case SFmode:
770 libcall = truncxfsf2_libfunc;
771 break;
773 case DFmode:
774 libcall = truncxfdf2_libfunc;
775 break;
777 default:
778 break;
780 break;
782 case TFmode:
783 switch (to_mode)
785 case SFmode:
786 libcall = trunctfsf2_libfunc;
787 break;
789 case DFmode:
790 libcall = trunctfdf2_libfunc;
791 break;
793 default:
794 break;
796 break;
798 default:
799 break;
802 if (libcall == (rtx) 0)
803 /* This conversion is not implemented yet. */
804 abort ();
806 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
807 1, from, from_mode);
808 emit_move_insn (to, value);
809 return;
812 /* Now both modes are integers. */
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 rtx insns;
819 rtx lowpart;
820 rtx fill_value;
821 rtx lowfrom;
822 int i;
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
837 return;
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
849 return;
852 /* No special multiword conversion insn; do it by hand. */
853 start_sequence ();
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
864 else
865 lowpart_mode = from_mode;
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
872 /* Compute the value to put in each remaining word. */
873 if (unsignedp)
874 fill_value = const0_rtx;
875 else
877 #ifdef HAVE_slt
878 if (HAVE_slt
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
883 lowpart_mode, 0, 0);
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
887 else
888 #endif
890 fill_value
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
893 NULL_RTX, 0);
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
904 if (subword == 0)
905 abort ();
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
911 insns = get_insns ();
912 end_sequence ();
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 return;
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
931 return;
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
944 return;
946 #endif /* HAVE_truncqipqi2 */
947 abort ();
950 if (from_mode == PQImode)
952 if (to_mode != QImode)
954 from = convert_to_mode (QImode, from, unsignedp);
955 from_mode = QImode;
957 else
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
963 return;
965 #endif /* HAVE_extendpqiqi2 */
966 abort ();
970 if (to_mode == PSImode)
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
979 return;
981 #endif /* HAVE_truncsipsi2 */
982 abort ();
985 if (from_mode == PSImode)
987 if (to_mode != SImode)
989 from = convert_to_mode (SImode, from, unsignedp);
990 from_mode = SImode;
992 else
994 #ifdef HAVE_extendpsisi2
995 if (HAVE_extendpsisi2)
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
998 return;
1000 #endif /* HAVE_extendpsisi2 */
1001 abort ();
1005 if (to_mode == PDImode)
1007 if (from_mode != DImode)
1008 from = convert_to_mode (DImode, from, unsignedp);
1010 #ifdef HAVE_truncdipdi2
1011 if (HAVE_truncdipdi2)
1013 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1014 return;
1016 #endif /* HAVE_truncdipdi2 */
1017 abort ();
1020 if (from_mode == PDImode)
1022 if (to_mode != DImode)
1024 from = convert_to_mode (DImode, from, unsignedp);
1025 from_mode = DImode;
1027 else
1029 #ifdef HAVE_extendpdidi2
1030 if (HAVE_extendpdidi2)
1032 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1033 return;
1035 #endif /* HAVE_extendpdidi2 */
1036 abort ();
1040 /* Now follow all the conversions between integers
1041 no more than a word long. */
1043 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1044 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1045 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1046 GET_MODE_BITSIZE (from_mode)))
1048 if (!((GET_CODE (from) == MEM
1049 && ! MEM_VOLATILE_P (from)
1050 && direct_load[(int) to_mode]
1051 && ! mode_dependent_address_p (XEXP (from, 0)))
1052 || GET_CODE (from) == REG
1053 || GET_CODE (from) == SUBREG))
1054 from = force_reg (from_mode, from);
1055 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1056 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1057 from = copy_to_reg (from);
1058 emit_move_insn (to, gen_lowpart (to_mode, from));
1059 return;
1062 /* Handle extension. */
1063 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1065 /* Convert directly if that works. */
1066 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1067 != CODE_FOR_nothing)
1069 emit_unop_insn (code, to, from, equiv_code);
1070 return;
1072 else
1074 enum machine_mode intermediate;
1075 rtx tmp;
1076 tree shift_amount;
1078 /* Search for a mode to convert via. */
1079 for (intermediate = from_mode; intermediate != VOIDmode;
1080 intermediate = GET_MODE_WIDER_MODE (intermediate))
1081 if (((can_extend_p (to_mode, intermediate, unsignedp)
1082 != CODE_FOR_nothing)
1083 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1084 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1085 GET_MODE_BITSIZE (intermediate))))
1086 && (can_extend_p (intermediate, from_mode, unsignedp)
1087 != CODE_FOR_nothing))
1089 convert_move (to, convert_to_mode (intermediate, from,
1090 unsignedp), unsignedp);
1091 return;
1094 /* No suitable intermediate mode.
1095 Generate what we need with shifts. */
1096 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1097 - GET_MODE_BITSIZE (from_mode), 0);
1098 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1099 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1100 to, unsignedp);
1101 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1102 to, unsignedp);
1103 if (tmp != to)
1104 emit_move_insn (to, tmp);
1105 return;
1109 /* Support special truncate insns for certain modes. */
1111 if (from_mode == DImode && to_mode == SImode)
1113 #ifdef HAVE_truncdisi2
1114 if (HAVE_truncdisi2)
1116 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1117 return;
1119 #endif
1120 convert_move (to, force_reg (from_mode, from), unsignedp);
1121 return;
1124 if (from_mode == DImode && to_mode == HImode)
1126 #ifdef HAVE_truncdihi2
1127 if (HAVE_truncdihi2)
1129 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1130 return;
1132 #endif
1133 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 return;
1137 if (from_mode == DImode && to_mode == QImode)
1139 #ifdef HAVE_truncdiqi2
1140 if (HAVE_truncdiqi2)
1142 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1143 return;
1145 #endif
1146 convert_move (to, force_reg (from_mode, from), unsignedp);
1147 return;
1150 if (from_mode == SImode && to_mode == HImode)
1152 #ifdef HAVE_truncsihi2
1153 if (HAVE_truncsihi2)
1155 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1156 return;
1158 #endif
1159 convert_move (to, force_reg (from_mode, from), unsignedp);
1160 return;
1163 if (from_mode == SImode && to_mode == QImode)
1165 #ifdef HAVE_truncsiqi2
1166 if (HAVE_truncsiqi2)
1168 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1169 return;
1171 #endif
1172 convert_move (to, force_reg (from_mode, from), unsignedp);
1173 return;
1176 if (from_mode == HImode && to_mode == QImode)
1178 #ifdef HAVE_trunchiqi2
1179 if (HAVE_trunchiqi2)
1181 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1182 return;
1184 #endif
1185 convert_move (to, force_reg (from_mode, from), unsignedp);
1186 return;
1189 if (from_mode == TImode && to_mode == DImode)
1191 #ifdef HAVE_trunctidi2
1192 if (HAVE_trunctidi2)
1194 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1195 return;
1197 #endif
1198 convert_move (to, force_reg (from_mode, from), unsignedp);
1199 return;
1202 if (from_mode == TImode && to_mode == SImode)
1204 #ifdef HAVE_trunctisi2
1205 if (HAVE_trunctisi2)
1207 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1208 return;
1210 #endif
1211 convert_move (to, force_reg (from_mode, from), unsignedp);
1212 return;
1215 if (from_mode == TImode && to_mode == HImode)
1217 #ifdef HAVE_trunctihi2
1218 if (HAVE_trunctihi2)
1220 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1221 return;
1223 #endif
1224 convert_move (to, force_reg (from_mode, from), unsignedp);
1225 return;
1228 if (from_mode == TImode && to_mode == QImode)
1230 #ifdef HAVE_trunctiqi2
1231 if (HAVE_trunctiqi2)
1233 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1234 return;
1236 #endif
1237 convert_move (to, force_reg (from_mode, from), unsignedp);
1238 return;
1241 /* Handle truncation of volatile memrefs, and so on;
1242 the things that couldn't be truncated directly,
1243 and for which there was no special instruction. */
1244 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1246 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1247 emit_move_insn (to, temp);
1248 return;
1251 /* Mode combination is not recognized. */
1252 abort ();
1255 /* Return an rtx for a value that would result
1256 from converting X to mode MODE.
1257 Both X and MODE may be floating, or both integer.
1258 UNSIGNEDP is nonzero if X is an unsigned value.
1259 This can be done by referring to a part of X in place
1260 or by copying to a new temporary with conversion.
1262 This function *must not* call protect_from_queue
1263 except when putting X into an insn (in which case convert_move does it). */
1265 rtx
1266 convert_to_mode (mode, x, unsignedp)
1267 enum machine_mode mode;
1268 rtx x;
1269 int unsignedp;
1271 return convert_modes (mode, VOIDmode, x, unsignedp);
1274 /* Return an rtx for a value that would result
1275 from converting X from mode OLDMODE to mode MODE.
1276 Both modes may be floating, or both integer.
1277 UNSIGNEDP is nonzero if X is an unsigned value.
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1282 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1284 This function *must not* call protect_from_queue
1285 except when putting X into an insn (in which case convert_move does it). */
1287 rtx
1288 convert_modes (mode, oldmode, x, unsignedp)
1289 enum machine_mode mode, oldmode;
1290 rtx x;
1291 int unsignedp;
1293 register rtx temp;
1295 /* If FROM is a SUBREG that indicates that we have already done at least
1296 the required extension, strip it. */
1298 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1299 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1300 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1301 x = gen_lowpart (mode, x);
1303 if (GET_MODE (x) != VOIDmode)
1304 oldmode = GET_MODE (x);
1306 if (mode == oldmode)
1307 return x;
1309 /* There is one case that we must handle specially: If we are converting
1310 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1311 we are to interpret the constant as unsigned, gen_lowpart will do
1312 the wrong thing if the constant appears negative. What we want to do is
1313 make the high-order word of the constant zero, not all ones. */
1315 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1316 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1317 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1319 HOST_WIDE_INT val = INTVAL (x);
1321 if (oldmode != VOIDmode
1322 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1324 int width = GET_MODE_BITSIZE (oldmode);
1326 /* We need to zero extend VAL. */
1327 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1333 /* We can do this with a gen_lowpart if both desired and current modes
1334 are integer, and this is either a constant integer, a register, or a
1335 non-volatile MEM. Except for the constant case where MODE is no
1336 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1338 if ((GET_CODE (x) == CONST_INT
1339 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1340 || (GET_MODE_CLASS (mode) == MODE_INT
1341 && GET_MODE_CLASS (oldmode) == MODE_INT
1342 && (GET_CODE (x) == CONST_DOUBLE
1343 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1344 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1345 && direct_load[(int) mode])
1346 || (GET_CODE (x) == REG
1347 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1348 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1350 /* ?? If we don't know OLDMODE, we have to assume here that
1351 X does not need sign- or zero-extension. This may not be
1352 the case, but it's the best we can do. */
1353 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1354 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1356 HOST_WIDE_INT val = INTVAL (x);
1357 int width = GET_MODE_BITSIZE (oldmode);
1359 /* We must sign or zero-extend in this case. Start by
1360 zero-extending, then sign extend if we need to. */
1361 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1362 if (! unsignedp
1363 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1364 val |= (HOST_WIDE_INT) (-1) << width;
1366 return GEN_INT (val);
1369 return gen_lowpart (mode, x);
1372 temp = gen_reg_rtx (mode);
1373 convert_move (temp, x, unsignedp);
1374 return temp;
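/* Illustrative sketch (not part of the original source):
   convert_to_mode is convert_modes with OLDMODE passed as VOIDmode, so
   the two calls below are equivalent when X carries its own mode.  */
#if 0
  {
    rtx x = gen_reg_rtx (QImode);
    rtx wide1 = convert_modes (SImode, QImode, x, 1);  /* zero extend */
    rtx wide2 = convert_to_mode (SImode, x, 1);        /* same thing */
  }
#endif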
1377 /* This macro is used to determine what the largest unit size that
1378 move_by_pieces can use is. */
1380 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1381 move efficiently, as opposed to MOVE_MAX which is the maximum
1382 number of bytes we can move with a single instruction. */
1384 #ifndef MOVE_MAX_PIECES
1385 #define MOVE_MAX_PIECES MOVE_MAX
1386 #endif
1388 /* Generate several move instructions to copy LEN bytes
1389 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1390 The caller must pass FROM and TO
1391 through protect_from_queue before calling.
1392 ALIGN is maximum alignment we can assume. */
1394 void
1395 move_by_pieces (to, from, len, align)
1396 rtx to, from;
1397 unsigned HOST_WIDE_INT len;
1398 unsigned int align;
1400 struct move_by_pieces data;
1401 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1402 unsigned int max_size = MOVE_MAX_PIECES + 1;
1403 enum machine_mode mode = VOIDmode, tmode;
1404 enum insn_code icode;
1406 data.offset = 0;
1407 data.to_addr = to_addr;
1408 data.from_addr = from_addr;
1409 data.to = to;
1410 data.from = from;
1411 data.autinc_to
1412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1414 data.autinc_from
1415 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1416 || GET_CODE (from_addr) == POST_INC
1417 || GET_CODE (from_addr) == POST_DEC);
1419 data.explicit_inc_from = 0;
1420 data.explicit_inc_to = 0;
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 if (data.reverse) data.offset = len;
1424 data.len = len;
1426 /* If copying requires more than two move insns,
1427 copy addresses to registers (to make displacements shorter)
1428 and use post-increment if available. */
1429 if (!(data.autinc_from && data.autinc_to)
1430 && move_by_pieces_ninsns (len, align) > 2)
1432 /* Find the mode of the largest move... */
1433 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1434 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1435 if (GET_MODE_SIZE (tmode) < max_size)
1436 mode = tmode;
1438 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1440 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1441 data.autinc_from = 1;
1442 data.explicit_inc_from = -1;
1444 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1446 data.from_addr = copy_addr_to_reg (from_addr);
1447 data.autinc_from = 1;
1448 data.explicit_inc_from = 1;
1450 if (!data.autinc_from && CONSTANT_P (from_addr))
1451 data.from_addr = copy_addr_to_reg (from_addr);
1452 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1454 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1455 data.autinc_to = 1;
1456 data.explicit_inc_to = -1;
1458 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1460 data.to_addr = copy_addr_to_reg (to_addr);
1461 data.autinc_to = 1;
1462 data.explicit_inc_to = 1;
1464 if (!data.autinc_to && CONSTANT_P (to_addr))
1465 data.to_addr = copy_addr_to_reg (to_addr);
1468 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1469 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1470 align = MOVE_MAX * BITS_PER_UNIT;
1472 /* First move what we can in the largest integer mode, then go to
1473 successively smaller modes. */
1475 while (max_size > 1)
1477 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1478 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1479 if (GET_MODE_SIZE (tmode) < max_size)
1480 mode = tmode;
1482 if (mode == VOIDmode)
1483 break;
1485 icode = mov_optab->handlers[(int) mode].insn_code;
1486 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1487 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1489 max_size = GET_MODE_SIZE (mode);
1492 /* The code above should have handled everything. */
1493 if (data.len > 0)
1494 abort ();
1497 /* Return number of insns required to move L bytes by pieces.
1498 ALIGN (in bytes) is maximum alignment we can assume. */
1500 static unsigned HOST_WIDE_INT
1501 move_by_pieces_ninsns (l, align)
1502 unsigned HOST_WIDE_INT l;
1503 unsigned int align;
1505 unsigned HOST_WIDE_INT n_insns = 0;
1506 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1508 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1509 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1510 align = MOVE_MAX * BITS_PER_UNIT;
1512 while (max_size > 1)
1514 enum machine_mode mode = VOIDmode, tmode;
1515 enum insn_code icode;
1517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1519 if (GET_MODE_SIZE (tmode) < max_size)
1520 mode = tmode;
1522 if (mode == VOIDmode)
1523 break;
1525 icode = mov_optab->handlers[(int) mode].insn_code;
1526 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1527 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1529 max_size = GET_MODE_SIZE (mode);
1532 return n_insns;
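/* Illustrative example (not part of the original source): on a
   hypothetical 32-bit target with MOVE_MAX == 4 and no unaligned-access
   penalty, an 11-byte block is counted as two SImode moves, one HImode
   move and one QImode move, so move_by_pieces_ninsns (11, 32) would
   return 4.  */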
1535 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1536 with move instructions for mode MODE. GENFUN is the gen_... function
1537 to make a move insn for that mode. DATA has all the other info. */
1539 static void
1540 move_by_pieces_1 (genfun, mode, data)
1541 rtx (*genfun) PARAMS ((rtx, ...));
1542 enum machine_mode mode;
1543 struct move_by_pieces *data;
1545 unsigned int size = GET_MODE_SIZE (mode);
1546 rtx to1, from1;
1548 while (data->len >= size)
1550 if (data->reverse)
1551 data->offset -= size;
1553 if (data->autinc_to)
1555 to1 = gen_rtx_MEM (mode, data->to_addr);
1556 MEM_COPY_ATTRIBUTES (to1, data->to);
1558 else
1559 to1 = change_address (data->to, mode,
1560 plus_constant (data->to_addr, data->offset));
1562 if (data->autinc_from)
1564 from1 = gen_rtx_MEM (mode, data->from_addr);
1565 MEM_COPY_ATTRIBUTES (from1, data->from);
1567 else
1568 from1 = change_address (data->from, mode,
1569 plus_constant (data->from_addr, data->offset));
1571 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1572 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1573 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1574 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1576 emit_insn ((*genfun) (to1, from1));
1578 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1579 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1580 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1581 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1583 if (! data->reverse)
1584 data->offset += size;
1586 data->len -= size;
1590 /* Emit code to move a block Y to a block X.
1591 This may be done with string-move instructions,
1592 with multiple scalar move instructions, or with a library call.
1594 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1595 with mode BLKmode.
1596 SIZE is an rtx that says how long they are.
1597 ALIGN is the maximum alignment we can assume they have.
1599 Return the address of the new block, if memcpy is called and returns it,
1600 0 otherwise. */
1602 rtx
1603 emit_block_move (x, y, size, align)
1604 rtx x, y;
1605 rtx size;
1606 unsigned int align;
1608 rtx retval = 0;
1609 #ifdef TARGET_MEM_FUNCTIONS
1610 static tree fn;
1611 tree call_expr, arg_list;
1612 #endif
1614 if (GET_MODE (x) != BLKmode)
1615 abort ();
1617 if (GET_MODE (y) != BLKmode)
1618 abort ();
1620 x = protect_from_queue (x, 1);
1621 y = protect_from_queue (y, 0);
1622 size = protect_from_queue (size, 0);
1624 if (GET_CODE (x) != MEM)
1625 abort ();
1626 if (GET_CODE (y) != MEM)
1627 abort ();
1628 if (size == 0)
1629 abort ();
1631 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1632 move_by_pieces (x, y, INTVAL (size), align);
1633 else
1635 /* Try the most limited insn first, because there's no point
1636 including more than one in the machine description unless
1637 the more limited one has some advantage. */
1639 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1640 enum machine_mode mode;
1642 /* Since this is a move insn, we don't care about volatility. */
1643 volatile_ok = 1;
1645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1646 mode = GET_MODE_WIDER_MODE (mode))
1648 enum insn_code code = movstr_optab[(int) mode];
1649 insn_operand_predicate_fn pred;
1651 if (code != CODE_FOR_nothing
1652 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1653 here because if SIZE is less than the mode mask, as it is
1654 returned by the macro, it will definitely be less than the
1655 actual mode mask. */
1656 && ((GET_CODE (size) == CONST_INT
1657 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1658 <= (GET_MODE_MASK (mode) >> 1)))
1659 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1660 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1661 || (*pred) (x, BLKmode))
1662 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1663 || (*pred) (y, BLKmode))
1664 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1665 || (*pred) (opalign, VOIDmode)))
1667 rtx op2;
1668 rtx last = get_last_insn ();
1669 rtx pat;
1671 op2 = convert_to_mode (mode, size, 1);
1672 pred = insn_data[(int) code].operand[2].predicate;
1673 if (pred != 0 && ! (*pred) (op2, mode))
1674 op2 = copy_to_mode_reg (mode, op2);
1676 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1677 if (pat)
1679 emit_insn (pat);
1680 volatile_ok = 0;
1681 return 0;
1683 else
1684 delete_insns_since (last);
1688 volatile_ok = 0;
1690 /* X, Y, or SIZE may have been passed through protect_from_queue.
1692 It is unsafe to save the value generated by protect_from_queue
1693 and reuse it later. Consider what happens if emit_queue is
1694 called before the return value from protect_from_queue is used.
1696 Expansion of the CALL_EXPR below will call emit_queue before
1697 we are finished emitting RTL for argument setup. So if we are
1698 not careful we could get the wrong value for an argument.
1700 To avoid this problem we go ahead and emit code to copy X, Y &
1701 SIZE into new pseudos. We can then place those new pseudos
1702 into an RTL_EXPR and use them later, even after a call to
1703 emit_queue.
1705 Note this is not strictly needed for library calls since they
1706 do not call emit_queue before loading their arguments. However,
1707 we may need to have library calls call emit_queue in the future
1708 since failing to do so could cause problems for targets which
1709 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1710 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1711 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1713 #ifdef TARGET_MEM_FUNCTIONS
1714 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1715 #else
1716 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1717 TREE_UNSIGNED (integer_type_node));
1718 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1719 #endif
1721 #ifdef TARGET_MEM_FUNCTIONS
1722 /* It is incorrect to use the libcall calling conventions to call
1723 memcpy in this context.
1725 This could be a user call to memcpy and the user may wish to
1726 examine the return value from memcpy.
1728 For targets where libcalls and normal calls have different conventions
1729 for returning pointers, we could end up generating incorrect code.
1731 So instead of using a libcall sequence we build up a suitable
1732 CALL_EXPR and expand the call in the normal fashion. */
1733 if (fn == NULL_TREE)
1735 tree fntype;
1737 /* This was copied from except.c, I don't know if all this is
1738 necessary in this context or not. */
1739 fn = get_identifier ("memcpy");
1740 push_obstacks_nochange ();
1741 end_temporary_allocation ();
1742 fntype = build_pointer_type (void_type_node);
1743 fntype = build_function_type (fntype, NULL_TREE);
1744 fn = build_decl (FUNCTION_DECL, fn, fntype);
1745 ggc_add_tree_root (&fn, 1);
1746 DECL_EXTERNAL (fn) = 1;
1747 TREE_PUBLIC (fn) = 1;
1748 DECL_ARTIFICIAL (fn) = 1;
1749 make_decl_rtl (fn, NULL_PTR, 1);
1750 assemble_external (fn);
1751 pop_obstacks ();
1754 /* We need to make an argument list for the function call.
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node), x));
1761 TREE_CHAIN (arg_list)
1762 = build_tree_list (NULL_TREE,
1763 make_tree (build_pointer_type (void_type_node), y));
1764 TREE_CHAIN (TREE_CHAIN (arg_list))
1765 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1766 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1768 /* Now we have to build up the CALL_EXPR itself. */
1769 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1770 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1771 call_expr, arg_list, NULL_TREE);
1772 TREE_SIDE_EFFECTS (call_expr) = 1;
1774 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1775 #else
1776 emit_library_call (bcopy_libfunc, 0,
1777 VOIDmode, 3, y, Pmode, x, Pmode,
1778 convert_to_mode (TYPE_MODE (integer_type_node), size,
1779 TREE_UNSIGNED (integer_type_node)),
1780 TYPE_MODE (integer_type_node));
1781 #endif
1784 return retval;
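/* Illustrative sketch (not part of the original source): a typical call
   to emit_block_move for a BLKmode structure copy.  DST_ADDR and
   SRC_ADDR are hypothetical address rtxen.  */
#if 0
  {
    rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
    rtx src = gen_rtx_MEM (BLKmode, src_addr);
    /* Copy 24 bytes, promising only byte alignment (ALIGN is in bits);
       emit_block_move picks move_by_pieces, a movstr pattern, or a
       memcpy/bcopy call as appropriate.  */
    emit_block_move (dst, src, GEN_INT (24), BITS_PER_UNIT);
  }
#endif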
1787 /* Copy all or part of a value X into registers starting at REGNO.
1788 The number of registers to be filled is NREGS. */
1790 void
1791 move_block_to_reg (regno, x, nregs, mode)
1792 int regno;
1793 rtx x;
1794 int nregs;
1795 enum machine_mode mode;
1797 int i;
1798 #ifdef HAVE_load_multiple
1799 rtx pat;
1800 rtx last;
1801 #endif
1803 if (nregs == 0)
1804 return;
1806 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1807 x = validize_mem (force_const_mem (mode, x));
1809 /* See if the machine can do this with a load multiple insn. */
1810 #ifdef HAVE_load_multiple
1811 if (HAVE_load_multiple)
1813 last = get_last_insn ();
1814 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1815 GEN_INT (nregs));
1816 if (pat)
1818 emit_insn (pat);
1819 return;
1821 else
1822 delete_insns_since (last);
1824 #endif
1826 for (i = 0; i < nregs; i++)
1827 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1828 operand_subword_force (x, i, mode));
1831 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1832 The number of registers to be filled is NREGS. SIZE indicates the number
1833 of bytes in the object X. */
1835 void
1836 move_block_from_reg (regno, x, nregs, size)
1837 int regno;
1838 rtx x;
1839 int nregs;
1840 int size;
1842 int i;
1843 #ifdef HAVE_store_multiple
1844 rtx pat;
1845 rtx last;
1846 #endif
1847 enum machine_mode mode;
1849 /* If SIZE is that of a mode no bigger than a word, just use that
1850 mode's store operation. */
1851 if (size <= UNITS_PER_WORD
1852 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1854 emit_move_insn (change_address (x, mode, NULL),
1855 gen_rtx_REG (mode, regno));
1856 return;
1859 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1860 to the left before storing to memory. Note that the previous test
1861 doesn't handle all cases (e.g. SIZE == 3). */
1862 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1864 rtx tem = operand_subword (x, 0, 1, BLKmode);
1865 rtx shift;
1867 if (tem == 0)
1868 abort ();
1870 shift = expand_shift (LSHIFT_EXPR, word_mode,
1871 gen_rtx_REG (word_mode, regno),
1872 build_int_2 ((UNITS_PER_WORD - size)
1873 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1874 emit_move_insn (tem, shift);
1875 return;
1878 /* See if the machine can do this with a store multiple insn. */
1879 #ifdef HAVE_store_multiple
1880 if (HAVE_store_multiple)
1882 last = get_last_insn ();
1883 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1884 GEN_INT (nregs));
1885 if (pat)
1887 emit_insn (pat);
1888 return;
1890 else
1891 delete_insns_since (last);
1893 #endif
1895 for (i = 0; i < nregs; i++)
1897 rtx tem = operand_subword (x, i, 1, BLKmode);
1899 if (tem == 0)
1900 abort ();
1902 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1906 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1907 registers represented by a PARALLEL. SSIZE represents the total size of
1908 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1909 SRC in bits. */
1910 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1911 the balance will be in what would be the low-order memory addresses, i.e.
1912 left justified for big endian, right justified for little endian. This
1913 happens to be true for the targets currently using this support. If this
1914 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1915 would be needed. */
1917 void
1918 emit_group_load (dst, orig_src, ssize, align)
1919 rtx dst, orig_src;
1920 unsigned int align;
1921 int ssize;
1923 rtx *tmps, src;
1924 int start, i;
1926 if (GET_CODE (dst) != PARALLEL)
1927 abort ();
1929 /* Check for a NULL entry, used to indicate that the parameter goes
1930 both on the stack and in registers. */
1931 if (XEXP (XVECEXP (dst, 0, 0), 0))
1932 start = 0;
1933 else
1934 start = 1;
1936 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1938 /* If we won't be loading directly from memory, protect the real source
1939 from strange tricks we might play. */
1940 src = orig_src;
1941 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1943 if (GET_MODE (src) == VOIDmode)
1944 src = gen_reg_rtx (GET_MODE (dst));
1945 else
1946 src = gen_reg_rtx (GET_MODE (orig_src));
1947 emit_move_insn (src, orig_src);
1950 /* Process the pieces. */
1951 for (i = start; i < XVECLEN (dst, 0); i++)
1953 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1954 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1955 unsigned int bytelen = GET_MODE_SIZE (mode);
1956 int shift = 0;
1958 /* Handle trailing fragments that run over the size of the struct. */
1959 if (ssize >= 0 && bytepos + bytelen > ssize)
1961 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1962 bytelen = ssize - bytepos;
1963 if (bytelen <= 0)
1964 abort ();
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src) == MEM
1969 && align >= GET_MODE_ALIGNMENT (mode)
1970 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1971 && bytelen == GET_MODE_SIZE (mode))
1973 tmps[i] = gen_reg_rtx (mode);
1974 emit_move_insn (tmps[i],
1975 change_address (src, mode,
1976 plus_constant (XEXP (src, 0),
1977 bytepos)));
1979 else if (GET_CODE (src) == CONCAT)
1981 if (bytepos == 0
1982 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1983 tmps[i] = XEXP (src, 0);
1984 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1985 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1986 tmps[i] = XEXP (src, 1);
1987 else
1988 abort ();
1990 else if ((CONSTANT_P (src)
1991 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
1992 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1993 tmps[i] = src;
1994 else
1995 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1996 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1997 mode, mode, align, ssize);
1999 if (BYTES_BIG_ENDIAN && shift)
2000 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2001 tmps[i], 0, OPTAB_WIDEN);
2004 emit_queue ();
2006 /* Copy the extracted pieces into the proper (probable) hard regs. */
2007 for (i = start; i < XVECLEN (dst, 0); i++)
2008 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
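/* Illustrative note (not part of the original source): the PARALLEL
   handled by emit_group_load and emit_group_store pairs each register
   with its byte offset within the block, e.g. (register numbers are
   hypothetical):
     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])
   A null register in the first entry marks a parameter passed partly on
   the stack and partly in registers.  */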
2011 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2012 registers represented by a PARALLEL. SSIZE represents the total size of
2013 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2015 void
2016 emit_group_store (orig_dst, src, ssize, align)
2017 rtx orig_dst, src;
2018 int ssize;
2019 unsigned int align;
2021 rtx *tmps, dst;
2022 int start, i;
2024 if (GET_CODE (src) != PARALLEL)
2025 abort ();
2027 /* Check for a NULL entry, used to indicate that the parameter goes
2028 both on the stack and in registers. */
2029 if (XEXP (XVECEXP (src, 0, 0), 0))
2030 start = 0;
2031 else
2032 start = 1;
2034 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2036 /* Copy the (probable) hard regs into pseudos. */
2037 for (i = start; i < XVECLEN (src, 0); i++)
2039 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2040 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2041 emit_move_insn (tmps[i], reg);
2043 emit_queue ();
2045 /* If we won't be storing directly into memory, protect the real destination
2046 from strange tricks we might play. */
2047 dst = orig_dst;
2048 if (GET_CODE (dst) == PARALLEL)
2050 rtx temp;
2052 /* We can get a PARALLEL dst if there is a conditional expression in
2053 a return statement. In that case, the dst and src are the same,
2054 so no action is necessary. */
2055 if (rtx_equal_p (dst, src))
2056 return;
2058 /* It is unclear if we can ever reach here, but we may as well handle
2059 it. Allocate a temporary, and split this into a store/load to/from
2060 the temporary. */
2062 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2063 emit_group_store (temp, src, ssize, align);
2064 emit_group_load (dst, temp, ssize, align);
2065 return;
2067 else if (GET_CODE (dst) != MEM)
2069 dst = gen_reg_rtx (GET_MODE (orig_dst));
2070 /* Make life a bit easier for combine. */
2071 emit_move_insn (dst, const0_rtx);
2074 /* Process the pieces. */
2075 for (i = start; i < XVECLEN (src, 0); i++)
2077 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2078 enum machine_mode mode = GET_MODE (tmps[i]);
2079 unsigned int bytelen = GET_MODE_SIZE (mode);
2081 /* Handle trailing fragments that run over the size of the struct. */
2082 if (ssize >= 0 && bytepos + bytelen > ssize)
2084 if (BYTES_BIG_ENDIAN)
2086 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2087 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2088 tmps[i], 0, OPTAB_WIDEN);
2090 bytelen = ssize - bytepos;
2093 /* Optimize the access just a bit. */
2094 if (GET_CODE (dst) == MEM
2095 && align >= GET_MODE_ALIGNMENT (mode)
2096 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2097 && bytelen == GET_MODE_SIZE (mode))
2098 emit_move_insn (change_address (dst, mode,
2099 plus_constant (XEXP (dst, 0),
2100 bytepos)),
2101 tmps[i]);
2102 else
2103 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2104 mode, tmps[i], align, ssize);
2107 emit_queue ();
2109 /* Copy from the pseudo into the (probable) hard reg. */
2110 if (GET_CODE (dst) == REG)
2111 emit_move_insn (orig_dst, dst);
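/* [Editorial sketch, not part of the original source.]  A worked instance
   of the trailing-fragment handling shared by emit_group_load (left shift)
   and emit_group_store (right shift) above, using hypothetical numbers:
   a 6-byte block whose second piece is a 4-byte register at offset 4.  */
#if 0
#include <stdio.h>

int
main ()
{
  int ssize = 6, bytepos = 4, bytelen = 4, bits_per_unit = 8;
  int shift = 0;

  if (ssize >= 0 && bytepos + bytelen > ssize)
    {
      shift = (bytelen - (ssize - bytepos)) * bits_per_unit;
      bytelen = ssize - bytepos;
    }

  /* Only 2 bytes of real data remain, so the piece is shifted by 16 bits
     to keep them in the low-order memory addresses on a big-endian target,
     as the comment before emit_group_load explains.  */
  printf ("bytelen = %d, shift = %d\n", bytelen, shift);
  return 0;
}
#endif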
2114 /* Generate code to copy a BLKmode object of TYPE out of a
2115 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2116 is null, a stack temporary is created. TGTBLK is returned.
2118 The primary purpose of this routine is to handle functions
2119 that return BLKmode structures in registers. Some machines
2120 (the PA for example) want to return all small structures
2121 in registers regardless of the structure's alignment. */
2123 rtx
2124 copy_blkmode_from_reg (tgtblk, srcreg, type)
2125 rtx tgtblk;
2126 rtx srcreg;
2127 tree type;
2129 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2130 rtx src = NULL, dst = NULL;
2131 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2132 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2134 if (tgtblk == 0)
2136 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2137 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2138 preserve_temp_slots (tgtblk);
2141 /* This code assumes srcreg is at least a full word. If it isn't,
2142 copy it into a new pseudo which is a full word. */
2143 if (GET_MODE (srcreg) != BLKmode
2144 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2145 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2147 /* Structures whose size is not a multiple of a word are aligned
2148 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2149 machine, this means we must skip the empty high order bytes when
2150 calculating the bit offset. */
2151 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2152 big_endian_correction
2153 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2155 /* Copy the structure BITSIZE bits at a time.
2157 We could probably emit more efficient code for machines which do not use
2158 strict alignment, but it doesn't seem worth the effort at the current
2159 time. */
2160 for (bitpos = 0, xbitpos = big_endian_correction;
2161 bitpos < bytes * BITS_PER_UNIT;
2162 bitpos += bitsize, xbitpos += bitsize)
2164 /* We need a new source operand each time xbitpos is on a
2165 word boundary and when xbitpos == big_endian_correction
2166 (the first time through). */
2167 if (xbitpos % BITS_PER_WORD == 0
2168 || xbitpos == big_endian_correction)
2169 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2171 /* We need a new destination operand each time bitpos is on
2172 a word boundary. */
2173 if (bitpos % BITS_PER_WORD == 0)
2174 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2176 /* Use xbitpos for the source extraction (right justified) and
2177 bitpos for the destination store (left justified). */
2178 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2179 extract_bit_field (src, bitsize,
2180 xbitpos % BITS_PER_WORD, 1,
2181 NULL_RTX, word_mode, word_mode,
2182 bitsize, BITS_PER_WORD),
2183 bitsize, BITS_PER_WORD);
2186 return tgtblk;
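/* [Editorial sketch, not part of the original source.]  A worked instance
   of the big_endian_correction computed above, with a hypothetical 32-bit
   big-endian word: a 6-byte structure leaves 2 meaningful bytes in its
   last word, so extraction must skip 16 empty high-order bits.  */
#if 0
#include <stdio.h>

int
main ()
{
  unsigned bytes = 6, units_per_word = 4;
  unsigned bits_per_word = 32, bits_per_unit = 8;
  unsigned correction = 0;

  if (bytes % units_per_word)
    correction = bits_per_word - (bytes % units_per_word) * bits_per_unit;

  printf ("big_endian_correction = %u\n", correction);	/* prints 16 */
  return 0;
}
#endif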
2189 /* Add a USE expression for REG to the (possibly empty) list pointed
2190 to by CALL_FUSAGE. REG must denote a hard register. */
2192 void
2193 use_reg (call_fusage, reg)
2194 rtx *call_fusage, reg;
2196 if (GET_CODE (reg) != REG
2197 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2198 abort ();
2200 *call_fusage
2201 = gen_rtx_EXPR_LIST (VOIDmode,
2202 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2205 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2206 starting at REGNO. All of these registers must be hard registers. */
2208 void
2209 use_regs (call_fusage, regno, nregs)
2210 rtx *call_fusage;
2211 int regno;
2212 int nregs;
2214 int i;
2216 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2217 abort ();
2219 for (i = 0; i < nregs; i++)
2220 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2223 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2224 PARALLEL REGS. This is for calls that pass values in multiple
2225 non-contiguous locations. The Irix 6 ABI has examples of this. */
2227 void
2228 use_group_regs (call_fusage, regs)
2229 rtx *call_fusage;
2230 rtx regs;
2232 int i;
2234 for (i = 0; i < XVECLEN (regs, 0); i++)
2236 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2238 /* A NULL entry means the parameter goes both on the stack and in
2239 registers. This can also be a MEM for targets that pass values
2240 partially on the stack and partially in registers. */
2241 if (reg != 0 && GET_CODE (reg) == REG)
2242 use_reg (call_fusage, reg);
2246 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2247 rtx with BLKmode). The caller must pass TO through protect_from_queue
2248 before calling. ALIGN is maximum alignment we can assume. */
2250 static void
2251 clear_by_pieces (to, len, align)
2252 rtx to;
2253 unsigned HOST_WIDE_INT len;
2254 unsigned int align;
2256 struct clear_by_pieces data;
2257 rtx to_addr = XEXP (to, 0);
2258 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2259 enum machine_mode mode = VOIDmode, tmode;
2260 enum insn_code icode;
2262 data.offset = 0;
2263 data.to_addr = to_addr;
2264 data.to = to;
2265 data.autinc_to
2266 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2267 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2269 data.explicit_inc_to = 0;
2270 data.reverse
2271 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2272 if (data.reverse)
2273 data.offset = len;
2274 data.len = len;
2276 /* If copying requires more than two move insns,
2277 copy addresses to registers (to make displacements shorter)
2278 and use post-increment if available. */
2279 if (!data.autinc_to
2280 && move_by_pieces_ninsns (len, align) > 2)
2282 /* Determine the main mode we'll be using. */
2283 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2284 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2285 if (GET_MODE_SIZE (tmode) < max_size)
2286 mode = tmode;
2288 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2290 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2291 data.autinc_to = 1;
2292 data.explicit_inc_to = -1;
2295 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2296 && ! data.autinc_to)
2298 data.to_addr = copy_addr_to_reg (to_addr);
2299 data.autinc_to = 1;
2300 data.explicit_inc_to = 1;
2303 if ( !data.autinc_to && CONSTANT_P (to_addr))
2304 data.to_addr = copy_addr_to_reg (to_addr);
2307 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2308 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2309 align = MOVE_MAX * BITS_PER_UNIT;
2311 /* First move what we can in the largest integer mode, then go to
2312 successively smaller modes. */
2314 while (max_size > 1)
2316 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2318 if (GET_MODE_SIZE (tmode) < max_size)
2319 mode = tmode;
2321 if (mode == VOIDmode)
2322 break;
2324 icode = mov_optab->handlers[(int) mode].insn_code;
2325 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2326 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2328 max_size = GET_MODE_SIZE (mode);
2331 /* The code above should have handled everything. */
2332 if (data.len != 0)
2333 abort ();
2336 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2337 with move instructions for mode MODE. GENFUN is the gen_... function
2338 to make a move insn for that mode. DATA has all the other info. */
2340 static void
2341 clear_by_pieces_1 (genfun, mode, data)
2342 rtx (*genfun) PARAMS ((rtx, ...));
2343 enum machine_mode mode;
2344 struct clear_by_pieces *data;
2346 unsigned int size = GET_MODE_SIZE (mode);
2347 rtx to1;
2349 while (data->len >= size)
2351 if (data->reverse)
2352 data->offset -= size;
2354 if (data->autinc_to)
2356 to1 = gen_rtx_MEM (mode, data->to_addr);
2357 MEM_COPY_ATTRIBUTES (to1, data->to);
2359 else
2360 to1 = change_address (data->to, mode,
2361 plus_constant (data->to_addr, data->offset));
2363 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2364 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2366 emit_insn ((*genfun) (to1, const0_rtx));
2368 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2369 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2371 if (! data->reverse)
2372 data->offset += size;
2374 data->len -= size;
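/* [Editorial sketch, not part of the original source.]  The piece-selection
   strategy of clear_by_pieces/clear_by_pieces_1 above, modelled in plain C:
   use the widest store that still fits, then progressively narrower ones.
   The piece sizes stand in for DImode, SImode, HImode and QImode stores and
   are hypothetical.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main ()
{
  unsigned char buf[11];
  int len = sizeof buf, offset = 0, i;
  static const int piece_sizes[] = { 8, 4, 2, 1 };

  memset (buf, 0xff, sizeof buf);

  for (i = 0; i < (int) (sizeof piece_sizes / sizeof piece_sizes[0]); i++)
    while (len >= piece_sizes[i])
      {
	memset (buf + offset, 0, piece_sizes[i]);	/* one "store insn" */
	printf ("cleared %d bytes at offset %d\n", piece_sizes[i], offset);
	offset += piece_sizes[i];
	len -= piece_sizes[i];
      }

  return len;		/* 0: everything was cleared */
}
#endif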
2378 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2379 its length in bytes and ALIGN is the maximum alignment we can assume.
2381 If we call a function that returns the length of the block, return it. */
2383 rtx
2384 clear_storage (object, size, align)
2385 rtx object;
2386 rtx size;
2387 unsigned int align;
2389 #ifdef TARGET_MEM_FUNCTIONS
2390 static tree fn;
2391 tree call_expr, arg_list;
2392 #endif
2393 rtx retval = 0;
2395 if (GET_MODE (object) == BLKmode)
2397 object = protect_from_queue (object, 1);
2398 size = protect_from_queue (size, 0);
2400 if (GET_CODE (size) == CONST_INT
2401 && MOVE_BY_PIECES_P (INTVAL (size), align))
2402 clear_by_pieces (object, INTVAL (size), align);
2403 else
2405 /* Try the most limited insn first, because there's no point
2406 including more than one in the machine description unless
2407 the more limited one has some advantage. */
2409 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2410 enum machine_mode mode;
2412 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2413 mode = GET_MODE_WIDER_MODE (mode))
2415 enum insn_code code = clrstr_optab[(int) mode];
2416 insn_operand_predicate_fn pred;
2418 if (code != CODE_FOR_nothing
2419 /* We don't need MODE to be narrower than
2420 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2421 the mode mask, as it is returned by the macro, it will
2422 definitely be less than the actual mode mask. */
2423 && ((GET_CODE (size) == CONST_INT
2424 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2425 <= (GET_MODE_MASK (mode) >> 1)))
2426 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2427 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2428 || (*pred) (object, BLKmode))
2429 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2430 || (*pred) (opalign, VOIDmode)))
2432 rtx op1;
2433 rtx last = get_last_insn ();
2434 rtx pat;
2436 op1 = convert_to_mode (mode, size, 1);
2437 pred = insn_data[(int) code].operand[1].predicate;
2438 if (pred != 0 && ! (*pred) (op1, mode))
2439 op1 = copy_to_mode_reg (mode, op1);
2441 pat = GEN_FCN ((int) code) (object, op1, opalign);
2442 if (pat)
2444 emit_insn (pat);
2445 return 0;
2447 else
2448 delete_insns_since (last);
2452 /* OBJECT or SIZE may have been passed through protect_from_queue.
2454 It is unsafe to save the value generated by protect_from_queue
2455 and reuse it later. Consider what happens if emit_queue is
2456 called before the return value from protect_from_queue is used.
2458 Expansion of the CALL_EXPR below will call emit_queue before
2459 we are finished emitting RTL for argument setup. So if we are
2460 not careful we could get the wrong value for an argument.
2462 To avoid this problem we go ahead and emit code to copy OBJECT
2463 and SIZE into new pseudos. We can then place those new pseudos
2464 into an RTL_EXPR and use them later, even after a call to
2465 emit_queue.
2467 Note this is not strictly needed for library calls since they
2468 do not call emit_queue before loading their arguments. However,
2469 we may need to have library calls call emit_queue in the future
2470 since failing to do so could cause problems for targets which
2471 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2472 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2474 #ifdef TARGET_MEM_FUNCTIONS
2475 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2476 #else
2477 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2478 TREE_UNSIGNED (integer_type_node));
2479 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2480 #endif
2482 #ifdef TARGET_MEM_FUNCTIONS
2483 /* It is incorrect to use the libcall calling conventions to call
2484 memset in this context.
2486 This could be a user call to memset and the user may wish to
2487 examine the return value from memset.
2489 For targets where libcalls and normal calls have different
2490 conventions for returning pointers, we could end up generating
2491 incorrect code.
2493 So instead of using a libcall sequence we build up a suitable
2494 CALL_EXPR and expand the call in the normal fashion. */
2495 if (fn == NULL_TREE)
2497 tree fntype;
2499 /* This was copied from except.c, I don't know if all this is
2500 necessary in this context or not. */
2501 fn = get_identifier ("memset");
2502 push_obstacks_nochange ();
2503 end_temporary_allocation ();
2504 fntype = build_pointer_type (void_type_node);
2505 fntype = build_function_type (fntype, NULL_TREE);
2506 fn = build_decl (FUNCTION_DECL, fn, fntype);
2507 ggc_add_tree_root (&fn, 1);
2508 DECL_EXTERNAL (fn) = 1;
2509 TREE_PUBLIC (fn) = 1;
2510 DECL_ARTIFICIAL (fn) = 1;
2511 make_decl_rtl (fn, NULL_PTR, 1);
2512 assemble_external (fn);
2513 pop_obstacks ();
2516 /* We need to make an argument list for the function call.
2518 memset has three arguments, the first is a void * address, the
2519 second an integer with the initialization value, and the last a
2520 size_t byte count for the copy. */
2521 arg_list
2522 = build_tree_list (NULL_TREE,
2523 make_tree (build_pointer_type (void_type_node),
2524 object));
2525 TREE_CHAIN (arg_list)
2526 = build_tree_list (NULL_TREE,
2527 make_tree (integer_type_node, const0_rtx));
2528 TREE_CHAIN (TREE_CHAIN (arg_list))
2529 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2530 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2532 /* Now we have to build up the CALL_EXPR itself. */
2533 call_expr = build1 (ADDR_EXPR,
2534 build_pointer_type (TREE_TYPE (fn)), fn);
2535 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2536 call_expr, arg_list, NULL_TREE);
2537 TREE_SIDE_EFFECTS (call_expr) = 1;
2539 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2540 #else
2541 emit_library_call (bzero_libfunc, 0,
2542 VOIDmode, 2, object, Pmode, size,
2543 TYPE_MODE (integer_type_node));
2544 #endif
2547 else
2548 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2550 return retval;
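/* [Editorial sketch, not part of the original source.]  The CALL_EXPR built
   in the TARGET_MEM_FUNCTIONS branch above corresponds to an ordinary
   source-level call to memset, so the usual call (not libcall) conventions
   for the return value apply, as the comment there explains.  Roughly:  */
#if 0
#include <string.h>

struct big { char bytes[64]; };

void
clear_it (struct big *p)
{
  memset (p, 0, sizeof *p);	/* what the expanded RTL amounts to */
}
#endif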
2553 /* Generate code to copy Y into X.
2554 Both Y and X must have the same mode, except that
2555 Y can be a constant with VOIDmode.
2556 This mode cannot be BLKmode; use emit_block_move for that.
2558 Return the last instruction emitted. */
2560 rtx
2561 emit_move_insn (x, y)
2562 rtx x, y;
2564 enum machine_mode mode = GET_MODE (x);
2566 x = protect_from_queue (x, 1);
2567 y = protect_from_queue (y, 0);
2569 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2570 abort ();
2572 /* Never force constant_p_rtx to memory. */
2573 if (GET_CODE (y) == CONSTANT_P_RTX)
2575 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2576 y = force_const_mem (mode, y);
2578 /* If X or Y are memory references, verify that their addresses are valid
2579 for the machine. */
2580 if (GET_CODE (x) == MEM
2581 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2582 && ! push_operand (x, GET_MODE (x)))
2583 || (flag_force_addr
2584 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2585 x = change_address (x, VOIDmode, XEXP (x, 0));
2587 if (GET_CODE (y) == MEM
2588 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2589 || (flag_force_addr
2590 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2591 y = change_address (y, VOIDmode, XEXP (y, 0));
2593 if (mode == BLKmode)
2594 abort ();
2596 return emit_move_insn_1 (x, y);
2599 /* Low level part of emit_move_insn.
2600 Called just like emit_move_insn, but assumes X and Y
2601 are basically valid. */
2603 rtx
2604 emit_move_insn_1 (x, y)
2605 rtx x, y;
2607 enum machine_mode mode = GET_MODE (x);
2608 enum machine_mode submode;
2609 enum mode_class class = GET_MODE_CLASS (mode);
2610 unsigned int i;
2612 if (mode >= MAX_MACHINE_MODE)
2613 abort ();
2615 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2616 return
2617 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2619 /* Expand complex moves by moving real part and imag part, if possible. */
2620 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2621 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2622 * BITS_PER_UNIT),
2623 (class == MODE_COMPLEX_INT
2624 ? MODE_INT : MODE_FLOAT),
2625 0))
2626 && (mov_optab->handlers[(int) submode].insn_code
2627 != CODE_FOR_nothing))
2629 /* Don't split destination if it is a stack push. */
2630 int stack = push_operand (x, GET_MODE (x));
2632 /* If this is a stack, push the highpart first, so it
2633 will be in the argument order.
2635 In that case, change_address is used only to convert
2636 the mode, not to change the address. */
2637 if (stack)
2639 /* Note that the real part always precedes the imag part in memory
2640 regardless of machine's endianness. */
2641 #ifdef STACK_GROWS_DOWNWARD
2642 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2643 (gen_rtx_MEM (submode, XEXP (x, 0)),
2644 gen_imagpart (submode, y)));
2645 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2646 (gen_rtx_MEM (submode, XEXP (x, 0)),
2647 gen_realpart (submode, y)));
2648 #else
2649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2650 (gen_rtx_MEM (submode, XEXP (x, 0)),
2651 gen_realpart (submode, y)));
2652 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2653 (gen_rtx_MEM (submode, XEXP (x, 0)),
2654 gen_imagpart (submode, y)));
2655 #endif
2657 else
2659 rtx realpart_x, realpart_y;
2660 rtx imagpart_x, imagpart_y;
2662 /* If this is a complex value with each part being smaller than a
2663 word, the usual calling sequence will likely pack the pieces into
2664 a single register. Unfortunately, SUBREG of hard registers only
2665 deals in terms of words, so we have a problem converting input
2666 arguments to the CONCAT of two registers that is used elsewhere
2667 for complex values. If this is before reload, we can copy it into
2668 memory and reload. FIXME, we should see about using extract and
2669 insert on integer registers, but complex short and complex char
2670 variables should be rarely used. */
2671 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2672 && (reload_in_progress | reload_completed) == 0)
2674 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2675 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2677 if (packed_dest_p || packed_src_p)
2679 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2680 ? MODE_FLOAT : MODE_INT);
2682 enum machine_mode reg_mode =
2683 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2685 if (reg_mode != BLKmode)
2687 rtx mem = assign_stack_temp (reg_mode,
2688 GET_MODE_SIZE (mode), 0);
2690 rtx cmem = change_address (mem, mode, NULL_RTX);
2692 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2694 if (packed_dest_p)
2696 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2697 emit_move_insn_1 (cmem, y);
2698 return emit_move_insn_1 (sreg, mem);
2700 else
2702 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2703 emit_move_insn_1 (mem, sreg);
2704 return emit_move_insn_1 (x, cmem);
2710 realpart_x = gen_realpart (submode, x);
2711 realpart_y = gen_realpart (submode, y);
2712 imagpart_x = gen_imagpart (submode, x);
2713 imagpart_y = gen_imagpart (submode, y);
2715 /* Show the output dies here. This is necessary for SUBREGs
2716 of pseudos since we cannot track their lifetimes correctly;
2717 hard regs shouldn't appear here except as return values.
2718 We never want to emit such a clobber after reload. */
2719 if (x != y
2720 && ! (reload_in_progress || reload_completed)
2721 && (GET_CODE (realpart_x) == SUBREG
2722 || GET_CODE (imagpart_x) == SUBREG))
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2727 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2728 (realpart_x, realpart_y));
2729 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2730 (imagpart_x, imagpart_y));
2733 return get_last_insn ();
2736 /* This will handle any multi-word mode that lacks a move_insn pattern.
2737 However, you will get better code if you define such patterns,
2738 even if they must turn into multiple assembler instructions. */
2739 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2741 rtx last_insn = 0;
2742 rtx seq, inner;
2743 int need_clobber;
2745 #ifdef PUSH_ROUNDING
2747 /* If X is a push on the stack, do the push now and replace
2748 X with a reference to the stack pointer. */
2749 if (push_operand (x, GET_MODE (x)))
2751 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2752 x = change_address (x, VOIDmode, stack_pointer_rtx);
2754 #endif
2756 /* If we are in reload, see if either operand is a MEM whose address
2757 is scheduled for replacement. */
2758 if (reload_in_progress && GET_CODE (x) == MEM
2759 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2761 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2763 MEM_COPY_ATTRIBUTES (new, x);
2764 x = new;
2766 if (reload_in_progress && GET_CODE (y) == MEM
2767 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2769 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2771 MEM_COPY_ATTRIBUTES (new, y);
2772 y = new;
2775 start_sequence ();
2777 need_clobber = 0;
2778 for (i = 0;
2779 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2780 i++)
2782 rtx xpart = operand_subword (x, i, 1, mode);
2783 rtx ypart = operand_subword (y, i, 1, mode);
2785 /* If we can't get a part of Y, put Y into memory if it is a
2786 constant. Otherwise, force it into a register. If we still
2787 can't get a part of Y, abort. */
2788 if (ypart == 0 && CONSTANT_P (y))
2790 y = force_const_mem (mode, y);
2791 ypart = operand_subword (y, i, 1, mode);
2793 else if (ypart == 0)
2794 ypart = operand_subword_force (y, i, mode);
2796 if (xpart == 0 || ypart == 0)
2797 abort ();
2799 need_clobber |= (GET_CODE (xpart) == SUBREG);
2801 last_insn = emit_move_insn (xpart, ypart);
2804 seq = gen_sequence ();
2805 end_sequence ();
2807 /* Show the output dies here. This is necessary for SUBREGs
2808 of pseudos since we cannot track their lifetimes correctly;
2809 hard regs shouldn't appear here except as return values.
2810 We never want to emit such a clobber after reload. */
2811 if (x != y
2812 && ! (reload_in_progress || reload_completed)
2813 && need_clobber != 0)
2815 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2818 emit_insn (seq);
2820 return last_insn;
2822 else
2823 abort ();
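/* [Editorial sketch, not part of the original source.]  When no mov pattern
   exists for a multi-word mode, the loop above moves the value one word at
   a time via operand_subword.  A plain C analogue of that decomposition,
   with a hypothetical 4-byte word size:  */
#if 0
#include <stdio.h>
#include <string.h>

#define WORD_BYTES 4		/* stand-in for UNITS_PER_WORD */

static void
move_by_words (unsigned char *x, const unsigned char *y, size_t bytes)
{
  size_t nwords = (bytes + WORD_BYTES - 1) / WORD_BYTES;
  size_t i;

  for (i = 0; i < nwords; i++)	/* one word-mode move per iteration */
    memcpy (x + i * WORD_BYTES, y + i * WORD_BYTES, WORD_BYTES);
}

int
main ()
{
  unsigned char src[8] = "1234567", dst[8];

  move_by_words (dst, src, sizeof src);
  printf ("%s\n", dst);
  return 0;
}
#endif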
2826 /* Pushing data onto the stack. */
2828 /* Push a block of length SIZE (perhaps variable)
2829 and return an rtx to address the beginning of the block.
2830 Note that it is not possible for the value returned to be a QUEUED.
2831 The value may be virtual_outgoing_args_rtx.
2833 EXTRA is the number of bytes of padding to push in addition to SIZE.
2834 BELOW nonzero means this padding comes at low addresses;
2835 otherwise, the padding comes at high addresses. */
2838 push_block (size, extra, below)
2839 rtx size;
2840 int extra, below;
2842 register rtx temp;
2844 size = convert_modes (Pmode, ptr_mode, size, 1);
2845 if (CONSTANT_P (size))
2846 anti_adjust_stack (plus_constant (size, extra));
2847 else if (GET_CODE (size) == REG && extra == 0)
2848 anti_adjust_stack (size);
2849 else
2851 temp = copy_to_mode_reg (Pmode, size);
2852 if (extra != 0)
2853 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2854 temp, 0, OPTAB_LIB_WIDEN);
2855 anti_adjust_stack (temp);
2858 #ifndef STACK_GROWS_DOWNWARD
2859 #ifdef ARGS_GROW_DOWNWARD
2860 if (!ACCUMULATE_OUTGOING_ARGS)
2861 #else
2862 if (0)
2863 #endif
2864 #else
2865 if (1)
2866 #endif
2868 /* Return the lowest stack address when STACK or ARGS grow downward and
2869 we are not accumulating outgoing arguments (the c4x port uses such
2870 conventions). */
2871 temp = virtual_outgoing_args_rtx;
2872 if (extra != 0 && below)
2873 temp = plus_constant (temp, extra);
2875 else
2877 if (GET_CODE (size) == CONST_INT)
2878 temp = plus_constant (virtual_outgoing_args_rtx,
2879 -INTVAL (size) - (below ? 0 : extra));
2880 else if (extra != 0 && !below)
2881 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2882 negate_rtx (Pmode, plus_constant (size, extra)));
2883 else
2884 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2885 negate_rtx (Pmode, size));
2888 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
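/* [Editorial sketch, not part of the original source.]  For the constant
   SIZE arm of the else branch above, the address returned is
   virtual_outgoing_args_rtx - SIZE - (BELOW ? 0 : EXTRA).  With
   hypothetical numbers:  */
#if 0
#include <stdio.h>

int
main ()
{
  long outgoing_args = 0x1000;	/* stand-in for virtual_outgoing_args_rtx */
  long size = 32, extra = 0, below = 0;
  long block = outgoing_args - size - (below ? 0 : extra);

  printf ("block starts at %#lx\n", block);	/* 0xfe0 */
  return 0;
}
#endif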
2891 rtx
2892 gen_push_operand ()
2894 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2897 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2898 block of SIZE bytes. */
2900 static rtx
2901 get_push_address (size)
2902 int size;
2904 register rtx temp;
2906 if (STACK_PUSH_CODE == POST_DEC)
2907 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2908 else if (STACK_PUSH_CODE == POST_INC)
2909 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2910 else
2911 temp = stack_pointer_rtx;
2913 return copy_to_reg (temp);
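/* [Editorial sketch, not part of the original source.]  get_push_address
   above undoes the post-modification of the stack pointer: after a POST_DEC
   push of SIZE bytes sp has already moved down by SIZE, so adding SIZE back
   recovers the address at which the block was pushed (and symmetrically,
   SIZE is subtracted for POST_INC).  Hypothetical numbers:  */
#if 0
#include <stdio.h>

int
main ()
{
  long sp = 0x1000;		/* stack pointer before the push */
  int size = 16;

  sp -= size;			/* POST_DEC push has moved sp down */
  printf ("pushed block at %#lx\n", sp + size);	/* back at 0x1000 */
  return 0;
}
#endif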
2916 /* Generate code to push X onto the stack, assuming it has mode MODE and
2917 type TYPE.
2918 MODE is redundant except when X is a CONST_INT (since they don't
2919 carry mode info).
2920 SIZE is an rtx for the size of data to be copied (in bytes),
2921 needed only if X is BLKmode.
2923 ALIGN is maximum alignment we can assume.
2925 If PARTIAL and REG are both nonzero, then copy that many of the first
2926 words of X into registers starting with REG, and push the rest of X.
2927 The amount of space pushed is decreased by PARTIAL words,
2928 rounded *down* to a multiple of PARM_BOUNDARY.
2929 REG must be a hard register in this case.
2930 If REG is zero but PARTIAL is not, take all other actions for an
2931 argument partially in registers, but do not actually load any
2932 registers.
2934 EXTRA is the amount in bytes of extra space to leave next to this arg.
2935 This is ignored if an argument block has already been allocated.
2937 On a machine that lacks real push insns, ARGS_ADDR is the address of
2938 the bottom of the argument block for this call. We use indexing off there
2939 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2940 argument block has not been preallocated.
2942 ARGS_SO_FAR is the size of args previously pushed for this call.
2944 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2945 for arguments passed in registers. If nonzero, it will be the number
2946 of bytes required. */
2948 void
2949 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2950 args_addr, args_so_far, reg_parm_stack_space,
2951 alignment_pad)
2952 register rtx x;
2953 enum machine_mode mode;
2954 tree type;
2955 rtx size;
2956 unsigned int align;
2957 int partial;
2958 rtx reg;
2959 int extra;
2960 rtx args_addr;
2961 rtx args_so_far;
2962 int reg_parm_stack_space;
2963 rtx alignment_pad;
2965 rtx xinner;
2966 enum direction stack_direction
2967 #ifdef STACK_GROWS_DOWNWARD
2968 = downward;
2969 #else
2970 = upward;
2971 #endif
2973 /* Decide where to pad the argument: `downward' for below,
2974 `upward' for above, or `none' for don't pad it.
2975 Default is below for small data on big-endian machines; else above. */
2976 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2978 /* Invert direction if stack is post-update. */
2979 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2980 if (where_pad != none)
2981 where_pad = (where_pad == downward ? upward : downward);
2983 xinner = x = protect_from_queue (x, 0);
2985 if (mode == BLKmode)
2987 /* Copy a block into the stack, entirely or partially. */
2989 register rtx temp;
2990 int used = partial * UNITS_PER_WORD;
2991 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2992 int skip;
2994 if (size == 0)
2995 abort ();
2997 used -= offset;
2999 /* USED is now the # of bytes we need not copy to the stack
3000 because registers will take care of them. */
3002 if (partial != 0)
3003 xinner = change_address (xinner, BLKmode,
3004 plus_constant (XEXP (xinner, 0), used));
3006 /* If the partial register-part of the arg counts in its stack size,
3007 skip the part of stack space corresponding to the registers.
3008 Otherwise, start copying to the beginning of the stack space,
3009 by setting SKIP to 0. */
3010 skip = (reg_parm_stack_space == 0) ? 0 : used;
3012 #ifdef PUSH_ROUNDING
3013 /* Do it with several push insns if that doesn't take lots of insns
3014 and if there is no difficulty with push insns that skip bytes
3015 on the stack for alignment purposes. */
3016 if (args_addr == 0
3017 && PUSH_ARGS
3018 && GET_CODE (size) == CONST_INT
3019 && skip == 0
3020 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3021 /* Here we avoid the case of a structure whose weak alignment
3022 forces many pushes of a small amount of data,
3023 and such small pushes do rounding that causes trouble. */
3024 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3025 || align >= BIGGEST_ALIGNMENT
3026 || PUSH_ROUNDING (align) == align)
3027 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3029 /* Push padding now if padding above and stack grows down,
3030 or if padding below and stack grows up.
3031 But if space already allocated, this has already been done. */
3032 if (extra && args_addr == 0
3033 && where_pad != none && where_pad != stack_direction)
3034 anti_adjust_stack (GEN_INT (extra));
3036 stack_pointer_delta += INTVAL (size) - used;
3037 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3038 INTVAL (size) - used, align);
3040 if (current_function_check_memory_usage && ! in_check_memory_usage)
3042 rtx temp;
3044 in_check_memory_usage = 1;
3045 temp = get_push_address (INTVAL (size) - used);
3046 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3047 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3048 temp, Pmode,
3049 XEXP (xinner, 0), Pmode,
3050 GEN_INT (INTVAL (size) - used),
3051 TYPE_MODE (sizetype));
3052 else
3053 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3054 temp, Pmode,
3055 GEN_INT (INTVAL (size) - used),
3056 TYPE_MODE (sizetype),
3057 GEN_INT (MEMORY_USE_RW),
3058 TYPE_MODE (integer_type_node));
3059 in_check_memory_usage = 0;
3062 else
3063 #endif /* PUSH_ROUNDING */
3065 rtx target;
3067 /* Otherwise make space on the stack and copy the data
3068 to the address of that space. */
3070 /* Deduct words put into registers from the size we must copy. */
3071 if (partial != 0)
3073 if (GET_CODE (size) == CONST_INT)
3074 size = GEN_INT (INTVAL (size) - used);
3075 else
3076 size = expand_binop (GET_MODE (size), sub_optab, size,
3077 GEN_INT (used), NULL_RTX, 0,
3078 OPTAB_LIB_WIDEN);
3081 /* Get the address of the stack space.
3082 In this case, we do not deal with EXTRA separately.
3083 A single stack adjust will do. */
3084 if (! args_addr)
3086 temp = push_block (size, extra, where_pad == downward);
3087 extra = 0;
3089 else if (GET_CODE (args_so_far) == CONST_INT)
3090 temp = memory_address (BLKmode,
3091 plus_constant (args_addr,
3092 skip + INTVAL (args_so_far)));
3093 else
3094 temp = memory_address (BLKmode,
3095 plus_constant (gen_rtx_PLUS (Pmode,
3096 args_addr,
3097 args_so_far),
3098 skip));
3099 if (current_function_check_memory_usage && ! in_check_memory_usage)
3101 in_check_memory_usage = 1;
3102 target = copy_to_reg (temp);
3103 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3104 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3105 target, Pmode,
3106 XEXP (xinner, 0), Pmode,
3107 size, TYPE_MODE (sizetype));
3108 else
3109 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3110 target, Pmode,
3111 size, TYPE_MODE (sizetype),
3112 GEN_INT (MEMORY_USE_RW),
3113 TYPE_MODE (integer_type_node));
3114 in_check_memory_usage = 0;
3117 target = gen_rtx_MEM (BLKmode, temp);
3119 if (type != 0)
3121 set_mem_attributes (target, type, 1);
3122 /* Function incoming arguments may overlap with sibling call
3123 outgoing arguments and we cannot allow reordering of reads
3124 from function arguments with stores to outgoing arguments
3125 of sibling calls. */
3126 MEM_ALIAS_SET (target) = 0;
3129 /* TEMP is the address of the block. Copy the data there. */
3130 if (GET_CODE (size) == CONST_INT
3131 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3133 move_by_pieces (target, xinner, INTVAL (size), align);
3134 goto ret;
3136 else
3138 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3139 enum machine_mode mode;
3141 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3142 mode != VOIDmode;
3143 mode = GET_MODE_WIDER_MODE (mode))
3145 enum insn_code code = movstr_optab[(int) mode];
3146 insn_operand_predicate_fn pred;
3148 if (code != CODE_FOR_nothing
3149 && ((GET_CODE (size) == CONST_INT
3150 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3151 <= (GET_MODE_MASK (mode) >> 1)))
3152 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3153 && (!(pred = insn_data[(int) code].operand[0].predicate)
3154 || ((*pred) (target, BLKmode)))
3155 && (!(pred = insn_data[(int) code].operand[1].predicate)
3156 || ((*pred) (xinner, BLKmode)))
3157 && (!(pred = insn_data[(int) code].operand[3].predicate)
3158 || ((*pred) (opalign, VOIDmode))))
3160 rtx op2 = convert_to_mode (mode, size, 1);
3161 rtx last = get_last_insn ();
3162 rtx pat;
3164 pred = insn_data[(int) code].operand[2].predicate;
3165 if (pred != 0 && ! (*pred) (op2, mode))
3166 op2 = copy_to_mode_reg (mode, op2);
3168 pat = GEN_FCN ((int) code) (target, xinner,
3169 op2, opalign);
3170 if (pat)
3172 emit_insn (pat);
3173 goto ret;
3175 else
3176 delete_insns_since (last);
3181 if (!ACCUMULATE_OUTGOING_ARGS)
3183 /* If the source is referenced relative to the stack pointer,
3184 copy it to another register to stabilize it. We do not need
3185 to do this if we know that we won't be changing sp. */
3187 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3188 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3189 temp = copy_to_reg (temp);
3192 /* Make inhibit_defer_pop nonzero around the library call
3193 to force it to pop the bcopy-arguments right away. */
3194 NO_DEFER_POP;
3195 #ifdef TARGET_MEM_FUNCTIONS
3196 emit_library_call (memcpy_libfunc, 0,
3197 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3198 convert_to_mode (TYPE_MODE (sizetype),
3199 size, TREE_UNSIGNED (sizetype)),
3200 TYPE_MODE (sizetype));
3201 #else
3202 emit_library_call (bcopy_libfunc, 0,
3203 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3204 convert_to_mode (TYPE_MODE (integer_type_node),
3205 size,
3206 TREE_UNSIGNED (integer_type_node)),
3207 TYPE_MODE (integer_type_node));
3208 #endif
3209 OK_DEFER_POP;
3212 else if (partial > 0)
3214 /* Scalar partly in registers. */
3216 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3217 int i;
3218 int not_stack;
3219 /* # words of start of argument
3220 that we must make space for but need not store. */
3221 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3222 int args_offset = INTVAL (args_so_far);
3223 int skip;
3225 /* Push padding now if padding above and stack grows down,
3226 or if padding below and stack grows up.
3227 But if space already allocated, this has already been done. */
3228 if (extra && args_addr == 0
3229 && where_pad != none && where_pad != stack_direction)
3230 anti_adjust_stack (GEN_INT (extra));
3232 /* If we make space by pushing it, we might as well push
3233 the real data. Otherwise, we can leave OFFSET nonzero
3234 and leave the space uninitialized. */
3235 if (args_addr == 0)
3236 offset = 0;
3238 /* Now NOT_STACK gets the number of words that we don't need to
3239 allocate on the stack. */
3240 not_stack = partial - offset;
3242 /* If the partial register-part of the arg counts in its stack size,
3243 skip the part of stack space corresponding to the registers.
3244 Otherwise, start copying to the beginning of the stack space,
3245 by setting SKIP to 0. */
3246 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3248 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3249 x = validize_mem (force_const_mem (mode, x));
3251 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3252 SUBREGs of such registers are not allowed. */
3253 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3254 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3255 x = copy_to_reg (x);
3257 /* Loop over all the words allocated on the stack for this arg. */
3258 /* We can do it by words, because any scalar bigger than a word
3259 has a size a multiple of a word. */
3260 #ifndef PUSH_ARGS_REVERSED
3261 for (i = not_stack; i < size; i++)
3262 #else
3263 for (i = size - 1; i >= not_stack; i--)
3264 #endif
3265 if (i >= not_stack + offset)
3266 emit_push_insn (operand_subword_force (x, i, mode),
3267 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3268 0, args_addr,
3269 GEN_INT (args_offset + ((i - not_stack + skip)
3270 * UNITS_PER_WORD)),
3271 reg_parm_stack_space, alignment_pad);
3273 else
3275 rtx addr;
3276 rtx target = NULL_RTX;
3277 rtx dest;
3279 /* Push padding now if padding above and stack grows down,
3280 or if padding below and stack grows up.
3281 But if space already allocated, this has already been done. */
3282 if (extra && args_addr == 0
3283 && where_pad != none && where_pad != stack_direction)
3284 anti_adjust_stack (GEN_INT (extra));
3286 #ifdef PUSH_ROUNDING
3287 if (args_addr == 0 && PUSH_ARGS)
3289 addr = gen_push_operand ();
3290 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3292 else
3293 #endif
3295 if (GET_CODE (args_so_far) == CONST_INT)
3296 addr
3297 = memory_address (mode,
3298 plus_constant (args_addr,
3299 INTVAL (args_so_far)));
3300 else
3301 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3302 args_so_far));
3303 target = addr;
3306 dest = gen_rtx_MEM (mode, addr);
3307 if (type != 0)
3309 set_mem_attributes (dest, type, 1);
3310 /* Function incoming arguments may overlap with sibling call
3311 outgoing arguments and we cannot allow reordering of reads
3312 from function arguments with stores to outgoing arguments
3313 of sibling calls. */
3314 MEM_ALIAS_SET (dest) = 0;
3317 emit_move_insn (dest, x);
3319 if (current_function_check_memory_usage && ! in_check_memory_usage)
3321 in_check_memory_usage = 1;
3322 if (target == 0)
3323 target = get_push_address (GET_MODE_SIZE (mode));
3325 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3326 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3327 target, Pmode,
3328 XEXP (x, 0), Pmode,
3329 GEN_INT (GET_MODE_SIZE (mode)),
3330 TYPE_MODE (sizetype));
3331 else
3332 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3333 target, Pmode,
3334 GEN_INT (GET_MODE_SIZE (mode)),
3335 TYPE_MODE (sizetype),
3336 GEN_INT (MEMORY_USE_RW),
3337 TYPE_MODE (integer_type_node));
3338 in_check_memory_usage = 0;
3342 ret:
3343 /* If part should go in registers, copy that part
3344 into the appropriate registers. Do this now, at the end,
3345 since mem-to-mem copies above may do function calls. */
3346 if (partial > 0 && reg != 0)
3348 /* Handle calls that pass values in multiple non-contiguous locations.
3349 The Irix 6 ABI has examples of this. */
3350 if (GET_CODE (reg) == PARALLEL)
3351 emit_group_load (reg, x, -1, align); /* ??? size? */
3352 else
3353 move_block_to_reg (REGNO (reg), x, partial, mode);
3356 if (extra && args_addr == 0 && where_pad == stack_direction)
3357 anti_adjust_stack (GEN_INT (extra));
3359 if (alignment_pad && args_addr == 0)
3360 anti_adjust_stack (alignment_pad);
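/* [Editorial sketch, not part of the original source.]  A worked instance
   of the PARTIAL bookkeeping in the BLKmode branch above, using hypothetical
   4-byte words and a 64-bit PARM_BOUNDARY: three words live in registers,
   but the stack space saved is rounded down to a PARM_BOUNDARY multiple,
   so only 8 of the 12 register bytes are skipped when copying.  */
#if 0
#include <stdio.h>

int
main ()
{
  int partial = 3, units_per_word = 4;
  int parm_boundary = 64, bits_per_unit = 8;

  int used = partial * units_per_word;			/* 12 */
  int offset = used % (parm_boundary / bits_per_unit);	/* 12 % 8 == 4 */
  used -= offset;					/* 8 */

  printf ("bytes not copied to the stack: %d\n", used);
  return 0;
}
#endif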
3363 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3364 operations. */
3366 static rtx
3367 get_subtarget (x)
3368 rtx x;
3370 return ((x == 0
3371 /* Only registers can be subtargets. */
3372 || GET_CODE (x) != REG
3373 /* If the register is readonly, it can't be set more than once. */
3374 || RTX_UNCHANGING_P (x)
3375 /* Don't use hard regs to avoid extending their life. */
3376 || REGNO (x) < FIRST_PSEUDO_REGISTER
3377 /* Avoid subtargets inside loops,
3378 since they hide some invariant expressions. */
3379 || preserve_subexpressions_p ())
3380 ? 0 : x);
3383 /* Expand an assignment that stores the value of FROM into TO.
3384 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3385 (This may contain a QUEUED rtx;
3386 if the value is constant, this rtx is a constant.)
3387 Otherwise, the returned value is NULL_RTX.
3389 SUGGEST_REG is no longer actually used.
3390 It used to mean, copy the value through a register
3391 and return that register, if that is possible.
3392 We now use WANT_VALUE to decide whether to do this. */
3394 rtx
3395 expand_assignment (to, from, want_value, suggest_reg)
3396 tree to, from;
3397 int want_value;
3398 int suggest_reg ATTRIBUTE_UNUSED;
3400 register rtx to_rtx = 0;
3401 rtx result;
3403 /* Don't crash if the lhs of the assignment was erroneous. */
3405 if (TREE_CODE (to) == ERROR_MARK)
3407 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3408 return want_value ? result : NULL_RTX;
3411 /* Assignment of a structure component needs special treatment
3412 if the structure component's rtx is not simply a MEM.
3413 Assignment of an array element at a constant index, and assignment of
3414 an array element in an unaligned packed structure field, has the same
3415 problem. */
3417 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3418 || TREE_CODE (to) == ARRAY_REF)
3420 enum machine_mode mode1;
3421 HOST_WIDE_INT bitsize, bitpos;
3422 tree offset;
3423 int unsignedp;
3424 int volatilep = 0;
3425 tree tem;
3426 unsigned int alignment;
3428 push_temp_slots ();
3429 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3430 &unsignedp, &volatilep, &alignment);
3432 /* If we are going to use store_bit_field and extract_bit_field,
3433 make sure to_rtx will be safe for multiple use. */
3435 if (mode1 == VOIDmode && want_value)
3436 tem = stabilize_reference (tem);
3438 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3439 if (offset != 0)
3441 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3443 if (GET_CODE (to_rtx) != MEM)
3444 abort ();
3446 if (GET_MODE (offset_rtx) != ptr_mode)
3448 #ifdef POINTERS_EXTEND_UNSIGNED
3449 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3450 #else
3451 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3452 #endif
3455 /* A constant address in TO_RTX can have VOIDmode, we must not try
3456 to call force_reg for that case. Avoid that case. */
3457 if (GET_CODE (to_rtx) == MEM
3458 && GET_MODE (to_rtx) == BLKmode
3459 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3460 && bitsize
3461 && (bitpos % bitsize) == 0
3462 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3463 && alignment == GET_MODE_ALIGNMENT (mode1))
3465 rtx temp = change_address (to_rtx, mode1,
3466 plus_constant (XEXP (to_rtx, 0),
3467 (bitpos /
3468 BITS_PER_UNIT)));
3469 if (GET_CODE (XEXP (temp, 0)) == REG)
3470 to_rtx = temp;
3471 else
3472 to_rtx = change_address (to_rtx, mode1,
3473 force_reg (GET_MODE (XEXP (temp, 0)),
3474 XEXP (temp, 0)));
3475 bitpos = 0;
3478 to_rtx = change_address (to_rtx, VOIDmode,
3479 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3480 force_reg (ptr_mode,
3481 offset_rtx)));
3484 if (volatilep)
3486 if (GET_CODE (to_rtx) == MEM)
3488 /* When the offset is zero, to_rtx is the address of the
3489 structure we are storing into, and hence may be shared.
3490 We must make a new MEM before setting the volatile bit. */
3491 if (offset == 0)
3492 to_rtx = copy_rtx (to_rtx);
3494 MEM_VOLATILE_P (to_rtx) = 1;
3496 #if 0 /* This was turned off because, when a field is volatile
3497 in an object which is not volatile, the object may be in a register,
3498 and then we would abort over here. */
3499 else
3500 abort ();
3501 #endif
3504 if (TREE_CODE (to) == COMPONENT_REF
3505 && TREE_READONLY (TREE_OPERAND (to, 1)))
3507 if (offset == 0)
3508 to_rtx = copy_rtx (to_rtx);
3510 RTX_UNCHANGING_P (to_rtx) = 1;
3513 /* Check the access. */
3514 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3516 rtx to_addr;
3517 int size;
3518 int best_mode_size;
3519 enum machine_mode best_mode;
3521 best_mode = get_best_mode (bitsize, bitpos,
3522 TYPE_ALIGN (TREE_TYPE (tem)),
3523 mode1, volatilep);
3524 if (best_mode == VOIDmode)
3525 best_mode = QImode;
3527 best_mode_size = GET_MODE_BITSIZE (best_mode);
3528 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3529 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3530 size *= GET_MODE_SIZE (best_mode);
3532 /* Check the access right of the pointer. */
3533 in_check_memory_usage = 1;
3534 if (size)
3535 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3536 to_addr, Pmode,
3537 GEN_INT (size), TYPE_MODE (sizetype),
3538 GEN_INT (MEMORY_USE_WO),
3539 TYPE_MODE (integer_type_node));
3540 in_check_memory_usage = 0;
3543 /* If this is a varying-length object, we must get the address of
3544 the source and do an explicit block move. */
3545 if (bitsize < 0)
3547 unsigned int from_align;
3548 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3549 rtx inner_to_rtx
3550 = change_address (to_rtx, VOIDmode,
3551 plus_constant (XEXP (to_rtx, 0),
3552 bitpos / BITS_PER_UNIT));
3554 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3555 MIN (alignment, from_align));
3556 free_temp_slots ();
3557 pop_temp_slots ();
3558 return to_rtx;
3560 else
3562 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3563 (want_value
3564 /* Spurious cast for HPUX compiler. */
3565 ? ((enum machine_mode)
3566 TYPE_MODE (TREE_TYPE (to)))
3567 : VOIDmode),
3568 unsignedp,
3569 alignment,
3570 int_size_in_bytes (TREE_TYPE (tem)),
3571 get_alias_set (to));
3573 preserve_temp_slots (result);
3574 free_temp_slots ();
3575 pop_temp_slots ();
3577 /* If the value is meaningful, convert RESULT to the proper mode.
3578 Otherwise, return nothing. */
3579 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3580 TYPE_MODE (TREE_TYPE (from)),
3581 result,
3582 TREE_UNSIGNED (TREE_TYPE (to)))
3583 : NULL_RTX);
3587 /* If the rhs is a function call and its value is not an aggregate,
3588 call the function before we start to compute the lhs.
3589 This is needed for correct code for cases such as
3590 val = setjmp (buf) on machines where reference to val
3591 requires loading up part of an address in a separate insn.
3593 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3594 since it might be a promoted variable where the zero- or sign- extension
3595 needs to be done. Handling this in the normal way is safe because no
3596 computation is done before the call. */
3597 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3598 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3599 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3600 && GET_CODE (DECL_RTL (to)) == REG))
3602 rtx value;
3604 push_temp_slots ();
3605 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3606 if (to_rtx == 0)
3607 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3609 /* Handle calls that return values in multiple non-contiguous locations.
3610 The Irix 6 ABI has examples of this. */
3611 if (GET_CODE (to_rtx) == PARALLEL)
3612 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3613 TYPE_ALIGN (TREE_TYPE (from)));
3614 else if (GET_MODE (to_rtx) == BLKmode)
3615 emit_block_move (to_rtx, value, expr_size (from),
3616 TYPE_ALIGN (TREE_TYPE (from)));
3617 else
3619 #ifdef POINTERS_EXTEND_UNSIGNED
3620 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3621 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3622 value = convert_memory_address (GET_MODE (to_rtx), value);
3623 #endif
3624 emit_move_insn (to_rtx, value);
3626 preserve_temp_slots (to_rtx);
3627 free_temp_slots ();
3628 pop_temp_slots ();
3629 return want_value ? to_rtx : NULL_RTX;
3632 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3633 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3635 if (to_rtx == 0)
3637 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3638 if (GET_CODE (to_rtx) == MEM)
3639 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3642 /* Don't move directly into a return register. */
3643 if (TREE_CODE (to) == RESULT_DECL
3644 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3646 rtx temp;
3648 push_temp_slots ();
3649 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3651 if (GET_CODE (to_rtx) == PARALLEL)
3652 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3653 TYPE_ALIGN (TREE_TYPE (from)));
3654 else
3655 emit_move_insn (to_rtx, temp);
3657 preserve_temp_slots (to_rtx);
3658 free_temp_slots ();
3659 pop_temp_slots ();
3660 return want_value ? to_rtx : NULL_RTX;
3663 /* In case we are returning the contents of an object which overlaps
3664 the place the value is being stored, use a safe function when copying
3665 a value through a pointer into a structure value return block. */
3666 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3667 && current_function_returns_struct
3668 && !current_function_returns_pcc_struct)
3670 rtx from_rtx, size;
3672 push_temp_slots ();
3673 size = expr_size (from);
3674 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3675 EXPAND_MEMORY_USE_DONT);
3677 /* Copy the rights of the bitmap. */
3678 if (current_function_check_memory_usage)
3679 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3680 XEXP (to_rtx, 0), Pmode,
3681 XEXP (from_rtx, 0), Pmode,
3682 convert_to_mode (TYPE_MODE (sizetype),
3683 size, TREE_UNSIGNED (sizetype)),
3684 TYPE_MODE (sizetype));
3686 #ifdef TARGET_MEM_FUNCTIONS
3687 emit_library_call (memcpy_libfunc, 0,
3688 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3689 XEXP (from_rtx, 0), Pmode,
3690 convert_to_mode (TYPE_MODE (sizetype),
3691 size, TREE_UNSIGNED (sizetype)),
3692 TYPE_MODE (sizetype));
3693 #else
3694 emit_library_call (bcopy_libfunc, 0,
3695 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3696 XEXP (to_rtx, 0), Pmode,
3697 convert_to_mode (TYPE_MODE (integer_type_node),
3698 size, TREE_UNSIGNED (integer_type_node)),
3699 TYPE_MODE (integer_type_node));
3700 #endif
3702 preserve_temp_slots (to_rtx);
3703 free_temp_slots ();
3704 pop_temp_slots ();
3705 return want_value ? to_rtx : NULL_RTX;
3708 /* Compute FROM and store the value in the rtx we got. */
3710 push_temp_slots ();
3711 result = store_expr (from, to_rtx, want_value);
3712 preserve_temp_slots (result);
3713 free_temp_slots ();
3714 pop_temp_slots ();
3715 return want_value ? result : NULL_RTX;
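/* [Editorial sketch, not part of the original source.]  Source-level forms
   whose left-hand sides take the COMPONENT_REF/ARRAY_REF path at the top of
   expand_assignment above, versus the plain store_expr path at the bottom.
   The declarations are hypothetical.  */
#if 0
struct s { int field : 3; int word; };

void
examples (struct s *p, int a[], int i, int v)
{
  p->word = v;		/* COMPONENT_REF: goes through get_inner_reference */
  p->field = v;		/* bit-field component: needs store_field */
  a[i] = v;		/* ARRAY_REF */
  v = v + 1;		/* plain variable: falls through to store_expr */
}
#endif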
3718 /* Generate code for computing expression EXP,
3719 and storing the value into TARGET.
3720 TARGET may contain a QUEUED rtx.
3722 If WANT_VALUE is nonzero, return a copy of the value
3723 not in TARGET, so that we can be sure to use the proper
3724 value in a containing expression even if TARGET has something
3725 else stored in it. If possible, we copy the value through a pseudo
3726 and return that pseudo. Or, if the value is constant, we try to
3727 return the constant. In some cases, we return a pseudo
3728 copied *from* TARGET.
3730 If the mode is BLKmode then we may return TARGET itself.
3731 It turns out that in BLKmode it doesn't cause a problem,
3732 because C has no operators that could combine two different
3733 assignments into the same BLKmode object with different values
3734 with no sequence point. Will other languages need this to
3735 be more thorough?
3737 If WANT_VALUE is 0, we return NULL, to make sure
3738 to catch quickly any cases where the caller uses the value
3739 and fails to set WANT_VALUE. */
3741 rtx
3742 store_expr (exp, target, want_value)
3743 register tree exp;
3744 register rtx target;
3745 int want_value;
3747 register rtx temp;
3748 int dont_return_target = 0;
3750 if (TREE_CODE (exp) == COMPOUND_EXPR)
3752 /* Perform first part of compound expression, then assign from second
3753 part. */
3754 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3755 emit_queue ();
3756 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3758 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3760 /* For conditional expression, get safe form of the target. Then
3761 test the condition, doing the appropriate assignment on either
3762 side. This avoids the creation of unnecessary temporaries.
3763 For non-BLKmode, it is more efficient not to do this. */
3765 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3767 emit_queue ();
3768 target = protect_from_queue (target, 1);
3770 do_pending_stack_adjust ();
3771 NO_DEFER_POP;
3772 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3773 start_cleanup_deferral ();
3774 store_expr (TREE_OPERAND (exp, 1), target, 0);
3775 end_cleanup_deferral ();
3776 emit_queue ();
3777 emit_jump_insn (gen_jump (lab2));
3778 emit_barrier ();
3779 emit_label (lab1);
3780 start_cleanup_deferral ();
3781 store_expr (TREE_OPERAND (exp, 2), target, 0);
3782 end_cleanup_deferral ();
3783 emit_queue ();
3784 emit_label (lab2);
3785 OK_DEFER_POP;
3787 return want_value ? target : NULL_RTX;
3789 else if (queued_subexp_p (target))
3790 /* If target contains a postincrement, let's not risk
3791 using it as the place to generate the rhs. */
3793 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3795 /* Expand EXP into a new pseudo. */
3796 temp = gen_reg_rtx (GET_MODE (target));
3797 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3799 else
3800 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3802 /* If target is volatile, ANSI requires accessing the value
3803 *from* the target, if it is accessed. So make that happen.
3804 In no case return the target itself. */
3805 if (! MEM_VOLATILE_P (target) && want_value)
3806 dont_return_target = 1;
3808 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3809 && GET_MODE (target) != BLKmode)
3810 /* If target is in memory and caller wants value in a register instead,
3811 arrange that. Pass TARGET as target for expand_expr so that,
3812 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3813 We know expand_expr will not use the target in that case.
3814 Don't do this if TARGET is volatile because we are supposed
3815 to write it and then read it. */
3817 temp = expand_expr (exp, target, GET_MODE (target), 0);
3818 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3819 temp = copy_to_reg (temp);
3820 dont_return_target = 1;
3822 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3823 /* If this is a scalar in a register that is stored in a wider mode
3824 than the declared mode, compute the result into its declared mode
3825 and then convert to the wider mode. Our value is the computed
3826 expression. */
3828 /* If we don't want a value, we can do the conversion inside EXP,
3829 which will often result in some optimizations. Do the conversion
3830 in two steps: first change the signedness, if needed, then
3831 the extend. But don't do this if the type of EXP is a subtype
3832 of something else since then the conversion might involve
3833 more than just converting modes. */
3834 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3835 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3837 if (TREE_UNSIGNED (TREE_TYPE (exp))
3838 != SUBREG_PROMOTED_UNSIGNED_P (target))
3839 exp
3840 = convert
3841 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3842 TREE_TYPE (exp)),
3843 exp);
3845 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3846 SUBREG_PROMOTED_UNSIGNED_P (target)),
3847 exp);
3850 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3852 /* If TEMP is a volatile MEM and we want a result value, make
3853 the access now so it gets done only once. Likewise if
3854 it contains TARGET. */
3855 if (GET_CODE (temp) == MEM && want_value
3856 && (MEM_VOLATILE_P (temp)
3857 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3858 temp = copy_to_reg (temp);
3860 /* If TEMP is a VOIDmode constant, use convert_modes to make
3861 sure that we properly convert it. */
3862 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3863 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3864 TYPE_MODE (TREE_TYPE (exp)), temp,
3865 SUBREG_PROMOTED_UNSIGNED_P (target));
3867 convert_move (SUBREG_REG (target), temp,
3868 SUBREG_PROMOTED_UNSIGNED_P (target));
3870 /* If we promoted a constant, change the mode back down to match
3871 target. Otherwise, the caller might get confused by a result whose
3872 mode is larger than expected. */
3874 if (want_value && GET_MODE (temp) != GET_MODE (target)
3875 && GET_MODE (temp) != VOIDmode)
3877 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3878 SUBREG_PROMOTED_VAR_P (temp) = 1;
3879 SUBREG_PROMOTED_UNSIGNED_P (temp)
3880 = SUBREG_PROMOTED_UNSIGNED_P (target);
3883 return want_value ? temp : NULL_RTX;
3885 else
3887 temp = expand_expr (exp, target, GET_MODE (target), 0);
3888 /* Return TARGET if it's a specified hardware register.
3889 If TARGET is a volatile mem ref, either return TARGET
3890 or return a reg copied *from* TARGET; ANSI requires this.
3892 Otherwise, if TEMP is not TARGET, return TEMP
3893 if it is constant (for efficiency),
3894 or if we really want the correct value. */
3895 if (!(target && GET_CODE (target) == REG
3896 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3897 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3898 && ! rtx_equal_p (temp, target)
3899 && (CONSTANT_P (temp) || want_value))
3900 dont_return_target = 1;
3903 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3904 the same as that of TARGET, adjust the constant. This is needed, for
3905 example, in case it is a CONST_DOUBLE and we want only a word-sized
3906 value. */
3907 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3908 && TREE_CODE (exp) != ERROR_MARK
3909 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3910 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3911 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3913 if (current_function_check_memory_usage
3914 && GET_CODE (target) == MEM
3915 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3917 in_check_memory_usage = 1;
3918 if (GET_CODE (temp) == MEM)
3919 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3920 XEXP (target, 0), Pmode,
3921 XEXP (temp, 0), Pmode,
3922 expr_size (exp), TYPE_MODE (sizetype));
3923 else
3924 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3925 XEXP (target, 0), Pmode,
3926 expr_size (exp), TYPE_MODE (sizetype),
3927 GEN_INT (MEMORY_USE_WO),
3928 TYPE_MODE (integer_type_node));
3929 in_check_memory_usage = 0;
3932 /* If value was not generated in the target, store it there.
3933 Convert the value to TARGET's type first if necessary. */
3934 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3935 one or both of them are volatile memory refs, we have to distinguish
3936 two cases:
3937 - expand_expr has used TARGET. In this case, we must not generate
3938 another copy. This can be detected by TEMP being equal to TARGET
3939 according to ==.
3940 - expand_expr has not used TARGET - that means that the source just
3941 happens to have the same RTX form. Since temp will have been created
3942 by expand_expr, it will compare unequal according to == .
3943 We must generate a copy in this case, to reach the correct number
3944 of volatile memory references. */
3946 if ((! rtx_equal_p (temp, target)
3947 || (temp != target && (side_effects_p (temp)
3948 || side_effects_p (target))))
3949 && TREE_CODE (exp) != ERROR_MARK)
3951 target = protect_from_queue (target, 1);
3952 if (GET_MODE (temp) != GET_MODE (target)
3953 && GET_MODE (temp) != VOIDmode)
3955 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3956 if (dont_return_target)
3958 /* In this case, we will return TEMP,
3959 so make sure it has the proper mode.
3960 But don't forget to store the value into TARGET. */
3961 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3962 emit_move_insn (target, temp);
3964 else
3965 convert_move (target, temp, unsignedp);
3968 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3970 /* Handle copying a string constant into an array.
3971 The string constant may be shorter than the array.
3972 So copy just the string's actual length, and clear the rest. */
3973 rtx size;
3974 rtx addr;
3976 /* Get the size of the data type of the string,
3977 which is actually the size of the target. */
3978 size = expr_size (exp);
3979 if (GET_CODE (size) == CONST_INT
3980 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3981 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3982 else
3984 /* Compute the size of the data to copy from the string. */
3985 tree copy_size
3986 = size_binop (MIN_EXPR,
3987 make_tree (sizetype, size),
3988 size_int (TREE_STRING_LENGTH (exp)));
3989 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3990 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3991 VOIDmode, 0);
3992 rtx label = 0;
3994 /* Copy that much. */
3995 emit_block_move (target, temp, copy_size_rtx,
3996 TYPE_ALIGN (TREE_TYPE (exp)));
3998 /* Figure out how much is left in TARGET that we have to clear.
3999 Do all calculations in ptr_mode. */
4001 addr = XEXP (target, 0);
4002 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4004 if (GET_CODE (copy_size_rtx) == CONST_INT)
4006 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4007 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
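/* INTVAL (copy_size_rtx) & - INTVAL (copy_size_rtx) is the largest power
of two dividing the copied byte count, so the bytes that remain to be
cleared start on at most that boundary. */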
4008 align = MIN (align, (BITS_PER_UNIT
4009 * (INTVAL (copy_size_rtx)
4010 & - INTVAL (copy_size_rtx))));
4012 else
4014 addr = force_reg (ptr_mode, addr);
4015 addr = expand_binop (ptr_mode, add_optab, addr,
4016 copy_size_rtx, NULL_RTX, 0,
4017 OPTAB_LIB_WIDEN);
4019 size = expand_binop (ptr_mode, sub_optab, size,
4020 copy_size_rtx, NULL_RTX, 0,
4021 OPTAB_LIB_WIDEN);
4023 align = BITS_PER_UNIT;
4024 label = gen_label_rtx ();
4025 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4026 GET_MODE (size), 0, 0, label);
4028 align = MIN (align, expr_align (copy_size));
4030 if (size != const0_rtx)
4032 rtx dest = gen_rtx_MEM (BLKmode, addr);
4034 MEM_COPY_ATTRIBUTES (dest, target);
4036 /* Be sure we can write on ADDR. */
4037 in_check_memory_usage = 1;
4038 if (current_function_check_memory_usage)
4039 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4040 addr, Pmode,
4041 size, TYPE_MODE (sizetype),
4042 GEN_INT (MEMORY_USE_WO),
4043 TYPE_MODE (integer_type_node));
4044 in_check_memory_usage = 0;
4045 clear_storage (dest, size, align);
4048 if (label)
4049 emit_label (label);
4052 /* Handle calls that return values in multiple non-contiguous locations.
4053 The Irix 6 ABI has examples of this. */
4054 else if (GET_CODE (target) == PARALLEL)
4055 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4056 TYPE_ALIGN (TREE_TYPE (exp)));
4057 else if (GET_MODE (temp) == BLKmode)
4058 emit_block_move (target, temp, expr_size (exp),
4059 TYPE_ALIGN (TREE_TYPE (exp)));
4060 else
4061 emit_move_insn (target, temp);
4064 /* If we don't want a value, return NULL_RTX. */
4065 if (! want_value)
4066 return NULL_RTX;
4068 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4069 ??? The latter test doesn't seem to make sense. */
4070 else if (dont_return_target && GET_CODE (temp) != MEM)
4071 return temp;
4073 /* Return TARGET itself if it is a hard register. */
4074 else if (want_value && GET_MODE (target) != BLKmode
4075 && ! (GET_CODE (target) == REG
4076 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4077 return copy_to_reg (target);
4079 else
4080 return target;
4083 /* Return 1 if EXP just contains zeros. */
4085 static int
4086 is_zeros_p (exp)
4087 tree exp;
4089 tree elt;
4091 switch (TREE_CODE (exp))
4093 case CONVERT_EXPR:
4094 case NOP_EXPR:
4095 case NON_LVALUE_EXPR:
4096 return is_zeros_p (TREE_OPERAND (exp, 0));
4098 case INTEGER_CST:
4099 return integer_zerop (exp);
4101 case COMPLEX_CST:
4102 return
4103 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4105 case REAL_CST:
4106 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4108 case CONSTRUCTOR:
4109 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4110 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4111 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4112 if (! is_zeros_p (TREE_VALUE (elt)))
4113 return 0;
4115 return 1;
4117 default:
4118 return 0;
4122 /* Return 1 if EXP contains mostly (3/4) zeros. */
4124 static int
4125 mostly_zeros_p (exp)
4126 tree exp;
4128 if (TREE_CODE (exp) == CONSTRUCTOR)
4130 int elts = 0, zeros = 0;
4131 tree elt = CONSTRUCTOR_ELTS (exp);
4132 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4134 /* If there are no ranges of true bits, it is all zero. */
4135 return elt == NULL_TREE;
4137 for (; elt; elt = TREE_CHAIN (elt))
4139 /* We do not handle the case where the index is a RANGE_EXPR,
4140 so the statistic will be somewhat inaccurate.
4141 We do make a more accurate count in store_constructor itself,
4142 so since this function is only used for nested array elements,
4143 this should be close enough. */
4144 if (mostly_zeros_p (TREE_VALUE (elt)))
4145 zeros++;
4146 elts++;
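/* "Mostly zero" here means at least three quarters of the elements
counted above are themselves zero or mostly zero. */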
4149 return 4 * zeros >= 3 * elts;
4152 return is_zeros_p (exp);
4155 /* Helper function for store_constructor.
4156 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4157 TYPE is the type of the CONSTRUCTOR, not the element type.
4158 ALIGN and CLEARED are as for store_constructor.
4160 This provides a recursive shortcut back to store_constructor when it isn't
4161 necessary to go through store_field. This is so that we can pass through
4162 the cleared field to let store_constructor know that we may not have to
4163 clear a substructure if the outer structure has already been cleared. */
4165 static void
4166 store_constructor_field (target, bitsize, bitpos,
4167 mode, exp, type, align, cleared)
4168 rtx target;
4169 unsigned HOST_WIDE_INT bitsize;
4170 HOST_WIDE_INT bitpos;
4171 enum machine_mode mode;
4172 tree exp, type;
4173 unsigned int align;
4174 int cleared;
4176 if (TREE_CODE (exp) == CONSTRUCTOR
4177 && bitpos % BITS_PER_UNIT == 0
4178 /* If we have a non-zero bitpos for a register target, then we just
4179 let store_field do the bitfield handling. This is unlikely to
4180 generate unnecessary clear instructions anyway. */
4181 && (bitpos == 0 || GET_CODE (target) == MEM))
4183 if (bitpos != 0)
4184 target
4185 = change_address (target,
4186 GET_MODE (target) == BLKmode
4187 || 0 != (bitpos
4188 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4189 ? BLKmode : VOIDmode,
4190 plus_constant (XEXP (target, 0),
4191 bitpos / BITS_PER_UNIT));
4192 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4194 else
4195 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4196 int_size_in_bytes (type), 0);
4199 /* Store the value of constructor EXP into the rtx TARGET.
4200 TARGET is either a REG or a MEM.
4201 ALIGN is the maximum known alignment for TARGET.
4202 CLEARED is true if TARGET is known to have been zero'd.
4203 SIZE is the number of bytes of TARGET we are allowed to modify: this
4204 may not be the same as the size of EXP if we are assigning to a field
4205 which has been packed to exclude padding bits. */
4207 static void
4208 store_constructor (exp, target, align, cleared, size)
4209 tree exp;
4210 rtx target;
4211 unsigned int align;
4212 int cleared;
4213 HOST_WIDE_INT size;
4215 tree type = TREE_TYPE (exp);
4216 #ifdef WORD_REGISTER_OPERATIONS
4217 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4218 #endif
4220 /* We know our target cannot conflict, since safe_from_p has been called. */
4221 #if 0
4222 /* Don't try copying piece by piece into a hard register
4223 since that is vulnerable to being clobbered by EXP.
4224 Instead, construct in a pseudo register and then copy it all. */
4225 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4227 rtx temp = gen_reg_rtx (GET_MODE (target));
4228 store_constructor (exp, temp, align, cleared, size);
4229 emit_move_insn (target, temp);
4230 return;
4232 #endif
4234 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4235 || TREE_CODE (type) == QUAL_UNION_TYPE)
4237 register tree elt;
4239 /* Inform later passes that the whole union value is dead. */
4240 if ((TREE_CODE (type) == UNION_TYPE
4241 || TREE_CODE (type) == QUAL_UNION_TYPE)
4242 && ! cleared)
4244 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4246 /* If the constructor is empty, clear the union. */
4247 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4248 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4251 /* If we are building a static constructor into a register,
4252 set the initial value as zero so we can fold the value into
4253 a constant. But if more than one register is involved,
4254 this probably loses. */
4255 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4256 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4258 if (! cleared)
4259 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4261 cleared = 1;
4264 /* If the constructor has fewer fields than the structure
4265 or if we are initializing the structure to mostly zeros,
4266 clear the whole structure first. */
4267 else if (size > 0
4268 && ((list_length (CONSTRUCTOR_ELTS (exp))
4269 != fields_length (type))
4270 || mostly_zeros_p (exp)))
4272 if (! cleared)
4273 clear_storage (target, GEN_INT (size), align);
4275 cleared = 1;
4277 else if (! cleared)
4278 /* Inform later passes that the old value is dead. */
4279 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4281 /* Store each element of the constructor into
4282 the corresponding field of TARGET. */
4284 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4286 register tree field = TREE_PURPOSE (elt);
4287 #ifdef WORD_REGISTER_OPERATIONS
4288 tree value = TREE_VALUE (elt);
4289 #endif
4290 register enum machine_mode mode;
4291 HOST_WIDE_INT bitsize;
4292 HOST_WIDE_INT bitpos = 0;
4293 int unsignedp;
4294 tree offset;
4295 rtx to_rtx = target;
4297 /* Just ignore missing fields.
4298 We cleared the whole structure, above,
4299 if any fields are missing. */
4300 if (field == 0)
4301 continue;
4303 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4304 continue;
4306 if (host_integerp (DECL_SIZE (field), 1))
4307 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4308 else
4309 bitsize = -1;
4311 unsignedp = TREE_UNSIGNED (field);
4312 mode = DECL_MODE (field);
4313 if (DECL_BIT_FIELD (field))
4314 mode = VOIDmode;
4316 offset = DECL_FIELD_OFFSET (field);
4317 if (host_integerp (offset, 0)
4318 && host_integerp (bit_position (field), 0))
4320 bitpos = int_bit_position (field);
4321 offset = 0;
4323 else
4324 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4326 if (offset)
4328 rtx offset_rtx;
4330 if (contains_placeholder_p (offset))
4331 offset = build (WITH_RECORD_EXPR, sizetype,
4332 offset, make_tree (TREE_TYPE (exp), target));
4334 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4335 if (GET_CODE (to_rtx) != MEM)
4336 abort ();
4338 if (GET_MODE (offset_rtx) != ptr_mode)
4340 #ifdef POINTERS_EXTEND_UNSIGNED
4341 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4342 #else
4343 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4344 #endif
4347 to_rtx
4348 = change_address (to_rtx, VOIDmode,
4349 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4350 force_reg (ptr_mode,
4351 offset_rtx)));
4352 align = DECL_OFFSET_ALIGN (field);
4355 if (TREE_READONLY (field))
4357 if (GET_CODE (to_rtx) == MEM)
4358 to_rtx = copy_rtx (to_rtx);
4360 RTX_UNCHANGING_P (to_rtx) = 1;
4363 #ifdef WORD_REGISTER_OPERATIONS
4364 /* If this initializes a field that is smaller than a word, at the
4365 start of a word, try to widen it to a full word.
4366 This special case allows us to output C++ member function
4367 initializations in a form that the optimizers can understand. */
4368 if (GET_CODE (target) == REG
4369 && bitsize < BITS_PER_WORD
4370 && bitpos % BITS_PER_WORD == 0
4371 && GET_MODE_CLASS (mode) == MODE_INT
4372 && TREE_CODE (value) == INTEGER_CST
4373 && exp_size >= 0
4374 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4376 tree type = TREE_TYPE (value);
4377 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4379 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4380 value = convert (type, value);
4382 if (BYTES_BIG_ENDIAN)
4383 value
4384 = fold (build (LSHIFT_EXPR, type, value,
4385 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4386 bitsize = BITS_PER_WORD;
4387 mode = word_mode;
4389 #endif
4390 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4391 TREE_VALUE (elt), type, align, cleared);
4394 else if (TREE_CODE (type) == ARRAY_TYPE)
4396 register tree elt;
4397 register int i;
4398 int need_to_clear;
4399 tree domain = TYPE_DOMAIN (type);
4400 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4401 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4402 tree elttype = TREE_TYPE (type);
4404 /* If the constructor has fewer elements than the array,
4405 clear the whole array first. Similarly if this is
4406 a static constructor of a non-BLKmode object. */
4407 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4408 need_to_clear = 1;
4409 else
4411 HOST_WIDE_INT count = 0, zero_count = 0;
4412 need_to_clear = 0;
4413 /* This loop is a more accurate version of the loop in
4414 mostly_zeros_p (it handles RANGE_EXPR in an index).
4415 It is also needed to check for missing elements. */
4416 for (elt = CONSTRUCTOR_ELTS (exp);
4417 elt != NULL_TREE;
4418 elt = TREE_CHAIN (elt))
4420 tree index = TREE_PURPOSE (elt);
4421 HOST_WIDE_INT this_node_count;
4423 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4425 tree lo_index = TREE_OPERAND (index, 0);
4426 tree hi_index = TREE_OPERAND (index, 1);
4428 if (! host_integerp (lo_index, 1)
4429 || ! host_integerp (hi_index, 1))
4431 need_to_clear = 1;
4432 break;
4435 this_node_count = (tree_low_cst (hi_index, 1)
4436 - tree_low_cst (lo_index, 1) + 1);
4438 else
4439 this_node_count = 1;
4440 count += this_node_count;
4441 if (mostly_zeros_p (TREE_VALUE (elt)))
4442 zero_count += this_node_count;
4444 /* Clear the entire array first if there are any missing elements,
4445 or if the incidence of zero elements is >= 75%. */
4446 if (count < maxelt - minelt + 1
4447 || 4 * zero_count >= 3 * count)
4448 need_to_clear = 1;
4450 if (need_to_clear && size > 0)
4452 if (! cleared)
4453 clear_storage (target, GEN_INT (size), align);
4454 cleared = 1;
4456 else
4457 /* Inform later passes that the old value is dead. */
4458 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4460 /* Store each element of the constructor into
4461 the corresponding element of TARGET, determined
4462 by counting the elements. */
4463 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4464 elt;
4465 elt = TREE_CHAIN (elt), i++)
4467 register enum machine_mode mode;
4468 HOST_WIDE_INT bitsize;
4469 HOST_WIDE_INT bitpos;
4470 int unsignedp;
4471 tree value = TREE_VALUE (elt);
4472 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4473 tree index = TREE_PURPOSE (elt);
4474 rtx xtarget = target;
4476 if (cleared && is_zeros_p (value))
4477 continue;
4479 unsignedp = TREE_UNSIGNED (elttype);
4480 mode = TYPE_MODE (elttype);
4481 if (mode == BLKmode)
4482 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4483 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4484 : -1);
4485 else
4486 bitsize = GET_MODE_BITSIZE (mode);
4488 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4490 tree lo_index = TREE_OPERAND (index, 0);
4491 tree hi_index = TREE_OPERAND (index, 1);
4492 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4493 struct nesting *loop;
4494 HOST_WIDE_INT lo, hi, count;
4495 tree position;
4497 /* If the range is constant and "small", unroll the loop. */
4498 if (host_integerp (lo_index, 0)
4499 && host_integerp (hi_index, 0)
4500 && (lo = tree_low_cst (lo_index, 0),
4501 hi = tree_low_cst (hi_index, 0),
4502 count = hi - lo + 1,
4503 (GET_CODE (target) != MEM
4504 || count <= 2
4505 || (host_integerp (TYPE_SIZE (elttype), 1)
4506 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4507 <= 40 * 8)))))
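/* When TARGET is in memory, the test above unrolls only for two or fewer
elements or at most 40 bytes (40 * 8 bits) of element data. Rebase the
range so bit positions are measured from the array's low bound. */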
4509 lo -= minelt; hi -= minelt;
4510 for (; lo <= hi; lo++)
4512 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4513 store_constructor_field (target, bitsize, bitpos, mode,
4514 value, type, align, cleared);
4517 else
4519 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4520 loop_top = gen_label_rtx ();
4521 loop_end = gen_label_rtx ();
4523 unsignedp = TREE_UNSIGNED (domain);
4525 index = build_decl (VAR_DECL, NULL_TREE, domain);
4527 DECL_RTL (index) = index_r
4528 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4529 &unsignedp, 0));
4531 if (TREE_CODE (value) == SAVE_EXPR
4532 && SAVE_EXPR_RTL (value) == 0)
4534 /* Make sure value gets expanded once before the
4535 loop. */
4536 expand_expr (value, const0_rtx, VOIDmode, 0);
4537 emit_queue ();
4539 store_expr (lo_index, index_r, 0);
4540 loop = expand_start_loop (0);
4542 /* Assign value to element index. */
4543 position
4544 = convert (ssizetype,
4545 fold (build (MINUS_EXPR, TREE_TYPE (index),
4546 index, TYPE_MIN_VALUE (domain))));
4547 position = size_binop (MULT_EXPR, position,
4548 convert (ssizetype,
4549 TYPE_SIZE_UNIT (elttype)));
4551 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4552 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4553 xtarget = change_address (target, mode, addr);
4554 if (TREE_CODE (value) == CONSTRUCTOR)
4555 store_constructor (value, xtarget, align, cleared,
4556 bitsize / BITS_PER_UNIT);
4557 else
4558 store_expr (value, xtarget, 0);
4560 expand_exit_loop_if_false (loop,
4561 build (LT_EXPR, integer_type_node,
4562 index, hi_index));
4564 expand_increment (build (PREINCREMENT_EXPR,
4565 TREE_TYPE (index),
4566 index, integer_one_node), 0, 0);
4567 expand_end_loop ();
4568 emit_label (loop_end);
4571 else if ((index != 0 && ! host_integerp (index, 0))
4572 || ! host_integerp (TYPE_SIZE (elttype), 1))
4574 rtx pos_rtx, addr;
4575 tree position;
4577 if (index == 0)
4578 index = ssize_int (1);
4580 if (minelt)
4581 index = convert (ssizetype,
4582 fold (build (MINUS_EXPR, index,
4583 TYPE_MIN_VALUE (domain))));
4585 position = size_binop (MULT_EXPR, index,
4586 convert (ssizetype,
4587 TYPE_SIZE_UNIT (elttype)));
4588 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4589 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4590 xtarget = change_address (target, mode, addr);
4591 store_expr (value, xtarget, 0);
4593 else
4595 if (index != 0)
4596 bitpos = ((tree_low_cst (index, 0) - minelt)
4597 * tree_low_cst (TYPE_SIZE (elttype), 1));
4598 else
4599 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4601 store_constructor_field (target, bitsize, bitpos, mode, value,
4602 type, align, cleared);
4607 /* Set constructor assignments. */
4608 else if (TREE_CODE (type) == SET_TYPE)
4610 tree elt = CONSTRUCTOR_ELTS (exp);
4611 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4612 tree domain = TYPE_DOMAIN (type);
4613 tree domain_min, domain_max, bitlength;
4615 /* The default implementation strategy is to extract the constant
4616 parts of the constructor, use that to initialize the target,
4617 and then "or" in whatever non-constant ranges we need in addition.
4619 If a large set is all zero or all ones, it is
4620 probably better to set it using memset (if available) or bzero.
4621 Also, if a large set has just a single range, it may also be
4622 better to first clear the whole set (using
4623 bzero/memset), and then set the bits we want. */
4625 /* Check for all zeros. */
4626 if (elt == NULL_TREE && size > 0)
4628 if (!cleared)
4629 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4630 return;
4633 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4634 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4635 bitlength = size_binop (PLUS_EXPR,
4636 size_diffop (domain_max, domain_min),
4637 ssize_int (1));
4639 nbits = tree_low_cst (bitlength, 1);
4641 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4642 are "complicated" (more than one range), initialize (the
4643 constant parts) by copying from a constant. */
4644 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4645 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4647 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4648 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4649 char *bit_buffer = (char *) alloca (nbits);
4650 HOST_WIDE_INT word = 0;
4651 unsigned int bit_pos = 0;
4652 unsigned int ibit = 0;
4653 unsigned int offset = 0; /* In bytes from beginning of set. */
4655 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4656 for (;;)
4658 if (bit_buffer[ibit])
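/* The first bit of each set word goes into the most significant bit on
big-endian targets and into the least significant bit otherwise. */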
4660 if (BYTES_BIG_ENDIAN)
4661 word |= (1 << (set_word_size - 1 - bit_pos));
4662 else
4663 word |= 1 << bit_pos;
4666 bit_pos++; ibit++;
4667 if (bit_pos >= set_word_size || ibit == nbits)
4669 if (word != 0 || ! cleared)
4671 rtx datum = GEN_INT (word);
4672 rtx to_rtx;
4674 /* The assumption here is that it is safe to use
4675 XEXP if the set is multi-word, but not if
4676 it's single-word. */
4677 if (GET_CODE (target) == MEM)
4679 to_rtx = plus_constant (XEXP (target, 0), offset);
4680 to_rtx = change_address (target, mode, to_rtx);
4682 else if (offset == 0)
4683 to_rtx = target;
4684 else
4685 abort ();
4686 emit_move_insn (to_rtx, datum);
4689 if (ibit == nbits)
4690 break;
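/* Start accumulating the next word of the set, one word-size step
further into TARGET. */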
4691 word = 0;
4692 bit_pos = 0;
4693 offset += set_word_size / BITS_PER_UNIT;
4697 else if (!cleared)
4698 /* Don't bother clearing storage if the set is all ones. */
4699 if (TREE_CHAIN (elt) != NULL_TREE
4700 || (TREE_PURPOSE (elt) == NULL_TREE
4701 ? nbits != 1
4702 : ( ! host_integerp (TREE_VALUE (elt), 0)
4703 || ! host_integerp (TREE_PURPOSE (elt), 0)
4704 || (tree_low_cst (TREE_VALUE (elt), 0)
4705 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4706 != (HOST_WIDE_INT) nbits))))
4707 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4709 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4711 /* Start of range of element or NULL. */
4712 tree startbit = TREE_PURPOSE (elt);
4713 /* End of range of element, or element value. */
4714 tree endbit = TREE_VALUE (elt);
4715 #ifdef TARGET_MEM_FUNCTIONS
4716 HOST_WIDE_INT startb, endb;
4717 #endif
4718 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4720 bitlength_rtx = expand_expr (bitlength,
4721 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4723 /* Handle non-range tuple element like [ expr ]. */
4724 if (startbit == NULL_TREE)
4726 startbit = save_expr (endbit);
4727 endbit = startbit;
4730 startbit = convert (sizetype, startbit);
4731 endbit = convert (sizetype, endbit);
4732 if (! integer_zerop (domain_min))
4734 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4735 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4737 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4738 EXPAND_CONST_ADDRESS);
4739 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4740 EXPAND_CONST_ADDRESS);
4742 if (REG_P (target))
4744 targetx = assign_stack_temp (GET_MODE (target),
4745 GET_MODE_SIZE (GET_MODE (target)),
4746 0);
4747 emit_move_insn (targetx, target);
4750 else if (GET_CODE (target) == MEM)
4751 targetx = target;
4752 else
4753 abort ();
4755 #ifdef TARGET_MEM_FUNCTIONS
4756 /* Optimization: If startbit and endbit are
4757 constants divisible by BITS_PER_UNIT,
4758 call memset instead. */
4759 if (TREE_CODE (startbit) == INTEGER_CST
4760 && TREE_CODE (endbit) == INTEGER_CST
4761 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4762 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4764 emit_library_call (memset_libfunc, 0,
4765 VOIDmode, 3,
4766 plus_constant (XEXP (targetx, 0),
4767 startb / BITS_PER_UNIT),
4768 Pmode,
4769 constm1_rtx, TYPE_MODE (integer_type_node),
4770 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4771 TYPE_MODE (sizetype));
4773 else
4774 #endif
4775 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4776 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4777 bitlength_rtx, TYPE_MODE (sizetype),
4778 startbit_rtx, TYPE_MODE (sizetype),
4779 endbit_rtx, TYPE_MODE (sizetype));
4781 if (REG_P (target))
4782 emit_move_insn (target, targetx);
4786 else
4787 abort ();
4790 /* Store the value of EXP (an expression tree)
4791 into a subfield of TARGET which has mode MODE and occupies
4792 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4793 If MODE is VOIDmode, it means that we are storing into a bit-field.
4795 If VALUE_MODE is VOIDmode, return nothing in particular.
4796 UNSIGNEDP is not used in this case.
4798 Otherwise, return an rtx for the value stored. This rtx
4799 has mode VALUE_MODE if that is convenient to do.
4800 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4802 ALIGN is the alignment that TARGET is known to have.
4803 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4805 ALIAS_SET is the alias set for the destination. This value will
4806 (in general) be different from that for TARGET, since TARGET is a
4807 reference to the containing structure. */
4809 static rtx
4810 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4811 unsignedp, align, total_size, alias_set)
4812 rtx target;
4813 HOST_WIDE_INT bitsize;
4814 HOST_WIDE_INT bitpos;
4815 enum machine_mode mode;
4816 tree exp;
4817 enum machine_mode value_mode;
4818 int unsignedp;
4819 unsigned int align;
4820 HOST_WIDE_INT total_size;
4821 int alias_set;
4823 HOST_WIDE_INT width_mask = 0;
4825 if (TREE_CODE (exp) == ERROR_MARK)
4826 return const0_rtx;
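/* WIDTH_MASK covers the low BITSIZE bits of a word; it is used below to
recover the stored value without re-reading the bit-field. */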
4828 if (bitsize < HOST_BITS_PER_WIDE_INT)
4829 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4831 /* If we are storing into an unaligned field of an aligned union that is
4832 in a register, we may have the mode of TARGET being an integer mode but
4833 MODE == BLKmode. In that case, get an aligned object whose size and
4834 alignment are the same as TARGET and store TARGET into it (we can avoid
4835 the store if the field being stored is the entire width of TARGET). Then
4836 call ourselves recursively to store the field into a BLKmode version of
4837 that object. Finally, load from the object into TARGET. This is not
4838 very efficient in general, but should only be slightly more expensive
4839 than the otherwise-required unaligned accesses. Perhaps this can be
4840 cleaned up later. */
4842 if (mode == BLKmode
4843 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4845 rtx object = assign_stack_temp (GET_MODE (target),
4846 GET_MODE_SIZE (GET_MODE (target)), 0);
4847 rtx blk_object = copy_rtx (object);
4849 MEM_SET_IN_STRUCT_P (object, 1);
4850 MEM_SET_IN_STRUCT_P (blk_object, 1);
4851 PUT_MODE (blk_object, BLKmode);
4853 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4854 emit_move_insn (object, target);
4856 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4857 align, total_size, alias_set);
4859 /* Even though we aren't returning target, we need to
4860 give it the updated value. */
4861 emit_move_insn (target, object);
4863 return blk_object;
4866 if (GET_CODE (target) == CONCAT)
4868 /* We're storing into a struct containing a single __complex. */
4870 if (bitpos != 0)
4871 abort ();
4872 return store_expr (exp, target, 0);
4875 /* If the structure is in a register or if the component
4876 is a bit field, we cannot use addressing to access it.
4877 Use bit-field techniques or SUBREG to store in it. */
4879 if (mode == VOIDmode
4880 || (mode != BLKmode && ! direct_store[(int) mode]
4881 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4882 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4883 || GET_CODE (target) == REG
4884 || GET_CODE (target) == SUBREG
4885 /* If the field isn't aligned enough to store as an ordinary memref,
4886 store it as a bit field. */
4887 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4888 && (align < GET_MODE_ALIGNMENT (mode)
4889 || bitpos % GET_MODE_ALIGNMENT (mode)))
4890 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4891 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4892 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4893 /* If the RHS and field are a constant size and the size of the
4894 RHS isn't the same size as the bitfield, we must use bitfield
4895 operations. */
4896 || (bitsize >= 0
4897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4898 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4900 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4902 /* If BITSIZE is narrower than the size of the type of EXP
4903 we will be narrowing TEMP. Normally, what's wanted are the
4904 low-order bits. However, if EXP's type is a record and this is
4905 a big-endian machine, we want the upper BITSIZE bits. */
4906 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4907 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4908 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4909 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4910 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4911 - bitsize),
4912 temp, 1);
4914 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4915 MODE. */
4916 if (mode != VOIDmode && mode != BLKmode
4917 && mode != TYPE_MODE (TREE_TYPE (exp)))
4918 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4920 /* If the modes of TARGET and TEMP are both BLKmode, both
4921 must be in memory and BITPOS must be aligned on a byte
4922 boundary. If so, we simply do a block copy. */
4923 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4925 unsigned int exp_align = expr_align (exp);
4927 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4928 || bitpos % BITS_PER_UNIT != 0)
4929 abort ();
4931 target = change_address (target, VOIDmode,
4932 plus_constant (XEXP (target, 0),
4933 bitpos / BITS_PER_UNIT));
4935 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4936 align = MIN (exp_align, align);
4938 /* Find an alignment that is consistent with the bit position. */
4939 while ((bitpos % align) != 0)
4940 align >>= 1;
4942 emit_block_move (target, temp,
4943 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4944 / BITS_PER_UNIT),
4945 align);
4947 return value_mode == VOIDmode ? const0_rtx : target;
4950 /* Store the value in the bitfield. */
4951 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4952 if (value_mode != VOIDmode)
4954 /* The caller wants an rtx for the value. */
4955 /* If possible, avoid refetching from the bitfield itself. */
4956 if (width_mask != 0
4957 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4959 tree count;
4960 enum machine_mode tmode;
4962 if (unsignedp)
4963 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4964 tmode = GET_MODE (temp);
4965 if (tmode == VOIDmode)
4966 tmode = value_mode;
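/* For a signed field, sign-extend the BITSIZE-bit value by shifting it
to the top of TMODE and arithmetically shifting it back down. */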
4967 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4968 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4969 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4971 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4972 NULL_RTX, value_mode, 0, align,
4973 total_size);
4975 return const0_rtx;
4977 else
4979 rtx addr = XEXP (target, 0);
4980 rtx to_rtx;
4982 /* If a value is wanted, it must be the lhs;
4983 so make the address stable for multiple use. */
4985 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4986 && ! CONSTANT_ADDRESS_P (addr)
4987 /* A frame-pointer reference is already stable. */
4988 && ! (GET_CODE (addr) == PLUS
4989 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4990 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4991 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4992 addr = copy_to_reg (addr);
4994 /* Now build a reference to just the desired component. */
4996 to_rtx = copy_rtx (change_address (target, mode,
4997 plus_constant (addr,
4998 (bitpos
4999 / BITS_PER_UNIT))));
5000 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5001 MEM_ALIAS_SET (to_rtx) = alias_set;
5003 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5007 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5008 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5009 ARRAY_REFs and find the ultimate containing object, which we return.
5011 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5012 bit position, and *PUNSIGNEDP to the signedness of the field.
5013 If the position of the field is variable, we store a tree
5014 giving the variable offset (in units) in *POFFSET.
5015 This offset is in addition to the bit position.
5016 If the position is not variable, we store 0 in *POFFSET.
5017 We set *PALIGNMENT to the alignment of the address that will be
5018 computed. This is the alignment of the thing we return if *POFFSET
5019 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5021 If any of the extraction expressions is volatile,
5022 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5024 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5025 is a mode that can be used to access the field. In that case, *PBITSIZE
5026 is redundant.
5028 If the field describes a variable-sized object, *PMODE is set to
5029 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5030 this case, but the address of the object can be found. */
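/* A minimal caller sketch (illustrative only; the variable names are made
up): callers allocate the output slots, clear VOLATILEP themselves since
it is only ever set here, and then do

tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
&mode, &unsignedp, &volatilep,
&alignment);

before deciding how to address the BITSIZE bits at BITPOS within INNER
(plus OFFSET, if that is nonzero). */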
5032 tree
5033 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5034 punsignedp, pvolatilep, palignment)
5035 tree exp;
5036 HOST_WIDE_INT *pbitsize;
5037 HOST_WIDE_INT *pbitpos;
5038 tree *poffset;
5039 enum machine_mode *pmode;
5040 int *punsignedp;
5041 int *pvolatilep;
5042 unsigned int *palignment;
5044 tree size_tree = 0;
5045 enum machine_mode mode = VOIDmode;
5046 tree offset = size_zero_node;
5047 tree bit_offset = bitsize_zero_node;
5048 unsigned int alignment = BIGGEST_ALIGNMENT;
5049 tree tem;
5051 /* First get the mode, signedness, and size. We do this from just the
5052 outermost expression. */
5053 if (TREE_CODE (exp) == COMPONENT_REF)
5055 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5056 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5057 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5059 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5061 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5063 size_tree = TREE_OPERAND (exp, 1);
5064 *punsignedp = TREE_UNSIGNED (exp);
5066 else
5068 mode = TYPE_MODE (TREE_TYPE (exp));
5069 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5071 if (mode == BLKmode)
5072 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5073 else
5074 *pbitsize = GET_MODE_BITSIZE (mode);
5077 if (size_tree != 0)
5079 if (! host_integerp (size_tree, 1))
5080 mode = BLKmode, *pbitsize = -1;
5081 else
5082 *pbitsize = tree_low_cst (size_tree, 1);
5085 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5086 and find the ultimate containing object. */
5087 while (1)
5089 if (TREE_CODE (exp) == BIT_FIELD_REF)
5090 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5091 else if (TREE_CODE (exp) == COMPONENT_REF)
5093 tree field = TREE_OPERAND (exp, 1);
5094 tree this_offset = DECL_FIELD_OFFSET (field);
5096 /* If this field hasn't been filled in yet, don't go
5097 past it. This should only happen when folding expressions
5098 made during type construction. */
5099 if (this_offset == 0)
5100 break;
5101 else if (! TREE_CONSTANT (this_offset)
5102 && contains_placeholder_p (this_offset))
5103 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5105 offset = size_binop (PLUS_EXPR, offset, this_offset);
5106 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5107 DECL_FIELD_BIT_OFFSET (field));
5109 if (! host_integerp (offset, 0))
5110 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5113 else if (TREE_CODE (exp) == ARRAY_REF)
5115 tree index = TREE_OPERAND (exp, 1);
5116 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5117 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5118 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5120 /* We assume all arrays have sizes that are a multiple of a byte.
5121 First subtract the lower bound, if any, in the type of the
5122 index, then convert to sizetype and multiply by the size of the
5123 array element. */
5124 if (low_bound != 0 && ! integer_zerop (low_bound))
5125 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5126 index, low_bound));
5128 /* If the index has a self-referential type, pass it to a
5129 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5130 component to one. */
5131 if (! TREE_CONSTANT (index)
5132 && contains_placeholder_p (index))
5133 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5134 if (! TREE_CONSTANT (unit_size)
5135 && contains_placeholder_p (unit_size))
5136 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5137 TREE_OPERAND (exp, 0));
5139 offset = size_binop (PLUS_EXPR, offset,
5140 size_binop (MULT_EXPR,
5141 convert (sizetype, index),
5142 unit_size));
5145 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5146 && ! ((TREE_CODE (exp) == NOP_EXPR
5147 || TREE_CODE (exp) == CONVERT_EXPR)
5148 && (TYPE_MODE (TREE_TYPE (exp))
5149 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5150 break;
5152 /* If any reference in the chain is volatile, the effect is volatile. */
5153 if (TREE_THIS_VOLATILE (exp))
5154 *pvolatilep = 1;
5156 /* If the offset is non-constant already, then we can't assume any
5157 alignment more than the alignment here. */
5158 if (! TREE_CONSTANT (offset))
5159 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5161 exp = TREE_OPERAND (exp, 0);
5164 if (DECL_P (exp))
5165 alignment = MIN (alignment, DECL_ALIGN (exp));
5166 else if (TREE_TYPE (exp) != 0)
5167 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5169 /* If OFFSET is constant, see if we can return the whole thing as a
5170 constant bit position. Otherwise, split it up. */
5171 if (host_integerp (offset, 0)
5172 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5173 bitsize_unit_node))
5174 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5175 && host_integerp (tem, 0))
5176 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5177 else
5178 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5180 *pmode = mode;
5181 *palignment = alignment;
5182 return exp;
5185 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5187 static enum memory_use_mode
5188 get_memory_usage_from_modifier (modifier)
5189 enum expand_modifier modifier;
5191 switch (modifier)
5193 case EXPAND_NORMAL:
5194 case EXPAND_SUM:
5195 return MEMORY_USE_RO;
5196 break;
5197 case EXPAND_MEMORY_USE_WO:
5198 return MEMORY_USE_WO;
5199 break;
5200 case EXPAND_MEMORY_USE_RW:
5201 return MEMORY_USE_RW;
5202 break;
5203 case EXPAND_MEMORY_USE_DONT:
5204 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5205 MEMORY_USE_DONT, because they are modifiers to a call of
5206 expand_expr in the ADDR_EXPR case of expand_expr. */
5207 case EXPAND_CONST_ADDRESS:
5208 case EXPAND_INITIALIZER:
5209 return MEMORY_USE_DONT;
5210 case EXPAND_MEMORY_USE_BAD:
5211 default:
5212 abort ();
5216 /* Given an rtx VALUE that may contain additions and multiplications,
5217 return an equivalent value that just refers to a register or memory.
5218 This is done by generating instructions to perform the arithmetic
5219 and returning a pseudo-register containing the value.
5221 The returned value may be a REG, SUBREG, MEM or constant. */
5223 rtx
5224 force_operand (value, target)
5225 rtx value, target;
5227 register optab binoptab = 0;
5228 /* Use a temporary to force order of execution of calls to
5229 `force_operand'. */
5230 rtx tmp;
5231 register rtx op2;
5232 /* Use subtarget as the target for operand 0 of a binary operation. */
5233 register rtx subtarget = get_subtarget (target);
5235 /* Check for a PIC address load. */
5236 if (flag_pic
5237 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5238 && XEXP (value, 0) == pic_offset_table_rtx
5239 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5240 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5241 || GET_CODE (XEXP (value, 1)) == CONST))
5243 if (!subtarget)
5244 subtarget = gen_reg_rtx (GET_MODE (value));
5245 emit_move_insn (subtarget, value);
5246 return subtarget;
5249 if (GET_CODE (value) == PLUS)
5250 binoptab = add_optab;
5251 else if (GET_CODE (value) == MINUS)
5252 binoptab = sub_optab;
5253 else if (GET_CODE (value) == MULT)
5255 op2 = XEXP (value, 1);
5256 if (!CONSTANT_P (op2)
5257 && !(GET_CODE (op2) == REG && op2 != subtarget))
5258 subtarget = 0;
5259 tmp = force_operand (XEXP (value, 0), subtarget);
5260 return expand_mult (GET_MODE (value), tmp,
5261 force_operand (op2, NULL_RTX),
5262 target, 0);
5265 if (binoptab)
5267 op2 = XEXP (value, 1);
5268 if (!CONSTANT_P (op2)
5269 && !(GET_CODE (op2) == REG && op2 != subtarget))
5270 subtarget = 0;
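/* Rewrite subtraction of a constant as addition of the negated constant;
the CONST_INT addition case below then applies to it as well. */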
5271 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5273 binoptab = add_optab;
5274 op2 = negate_rtx (GET_MODE (value), op2);
5277 /* Check for an addition with OP2 a constant integer and our first
5278 operand a PLUS of a virtual register and something else. In that
5279 case, we want to emit the sum of the virtual register and the
5280 constant first and then add the other value. This allows virtual
5281 register instantiation to simply modify the constant rather than
5282 creating another one around this addition. */
5283 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5284 && GET_CODE (XEXP (value, 0)) == PLUS
5285 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5286 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5287 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5289 rtx temp = expand_binop (GET_MODE (value), binoptab,
5290 XEXP (XEXP (value, 0), 0), op2,
5291 subtarget, 0, OPTAB_LIB_WIDEN);
5292 return expand_binop (GET_MODE (value), binoptab, temp,
5293 force_operand (XEXP (XEXP (value, 0), 1), 0),
5294 target, 0, OPTAB_LIB_WIDEN);
5297 tmp = force_operand (XEXP (value, 0), subtarget);
5298 return expand_binop (GET_MODE (value), binoptab, tmp,
5299 force_operand (op2, NULL_RTX),
5300 target, 0, OPTAB_LIB_WIDEN);
5301 /* We give UNSIGNEDP = 0 to expand_binop
5302 because the only operations we are expanding here are signed ones. */
5304 return value;
5307 /* Subroutine of expand_expr:
5308 save the non-copied parts (LIST) of an expr (LHS), and return a list
5309 which can restore these values to their previous values,
5310 should something modify their storage. */
5312 static tree
5313 save_noncopied_parts (lhs, list)
5314 tree lhs;
5315 tree list;
5317 tree tail;
5318 tree parts = 0;
5320 for (tail = list; tail; tail = TREE_CHAIN (tail))
5321 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5322 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5323 else
5325 tree part = TREE_VALUE (tail);
5326 tree part_type = TREE_TYPE (part);
5327 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5328 rtx target = assign_temp (part_type, 0, 1, 1);
5329 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5330 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5331 parts = tree_cons (to_be_saved,
5332 build (RTL_EXPR, part_type, NULL_TREE,
5333 (tree) target),
5334 parts);
5335 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5337 return parts;
5340 /* Subroutine of expand_expr:
5341 record the non-copied parts (LIST) of an expr (LHS), and return a list
5342 which specifies the initial values of these parts. */
5344 static tree
5345 init_noncopied_parts (lhs, list)
5346 tree lhs;
5347 tree list;
5349 tree tail;
5350 tree parts = 0;
5352 for (tail = list; tail; tail = TREE_CHAIN (tail))
5353 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5354 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5355 else if (TREE_PURPOSE (tail))
5357 tree part = TREE_VALUE (tail);
5358 tree part_type = TREE_TYPE (part);
5359 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5360 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5362 return parts;
5365 /* Subroutine of expand_expr: return nonzero iff there is no way that
5366 EXP can reference X, which is being modified. TOP_P is nonzero if this
5367 call is going to be used to determine whether we need a temporary
5368 for EXP, as opposed to a recursive call to this function.
5370 It is always safe for this routine to return zero since it merely
5371 searches for optimization opportunities. */
5373 static int
5374 safe_from_p (x, exp, top_p)
5375 rtx x;
5376 tree exp;
5377 int top_p;
5379 rtx exp_rtl = 0;
5380 int i, nops;
5381 static int save_expr_count;
5382 static int save_expr_size = 0;
5383 static tree *save_expr_rewritten;
5384 static tree save_expr_trees[256];
5386 if (x == 0
5387 /* If EXP has varying size, we MUST use a target since we currently
5388 have no way of allocating temporaries of variable size
5389 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5390 So we assume here that something at a higher level has prevented a
5391 clash. This is somewhat bogus, but the best we can do. Only
5392 do this when X is BLKmode and when we are at the top level. */
5393 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5394 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5395 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5396 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5397 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5398 != INTEGER_CST)
5399 && GET_MODE (x) == BLKmode))
5400 return 1;
5402 if (top_p && save_expr_size == 0)
5404 int rtn;
5406 save_expr_count = 0;
5407 save_expr_size = ARRAY_SIZE (save_expr_trees);
5408 save_expr_rewritten = &save_expr_trees[0];
5410 rtn = safe_from_p (x, exp, 1);
5412 for (i = 0; i < save_expr_count; ++i)
5414 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5415 abort ();
5416 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5419 save_expr_size = 0;
5421 return rtn;
5424 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5425 find the underlying pseudo. */
5426 if (GET_CODE (x) == SUBREG)
5428 x = SUBREG_REG (x);
5429 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5430 return 0;
5433 /* If X is a location in the outgoing argument area, it is always safe. */
5434 if (GET_CODE (x) == MEM
5435 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5436 || (GET_CODE (XEXP (x, 0)) == PLUS
5437 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5438 return 1;
5440 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5442 case 'd':
5443 exp_rtl = DECL_RTL (exp);
5444 break;
5446 case 'c':
5447 return 1;
5449 case 'x':
5450 if (TREE_CODE (exp) == TREE_LIST)
5451 return ((TREE_VALUE (exp) == 0
5452 || safe_from_p (x, TREE_VALUE (exp), 0))
5453 && (TREE_CHAIN (exp) == 0
5454 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5455 else if (TREE_CODE (exp) == ERROR_MARK)
5456 return 1; /* An already-visited SAVE_EXPR? */
5457 else
5458 return 0;
5460 case '1':
5461 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5463 case '2':
5464 case '<':
5465 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5466 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5468 case 'e':
5469 case 'r':
5470 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5471 the expression. If it is set, we conflict iff we are that rtx or
5472 both are in memory. Otherwise, we check all operands of the
5473 expression recursively. */
5475 switch (TREE_CODE (exp))
5477 case ADDR_EXPR:
5478 return (staticp (TREE_OPERAND (exp, 0))
5479 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5480 || TREE_STATIC (exp));
5482 case INDIRECT_REF:
5483 if (GET_CODE (x) == MEM)
5484 return 0;
5485 break;
5487 case CALL_EXPR:
5488 exp_rtl = CALL_EXPR_RTL (exp);
5489 if (exp_rtl == 0)
5491 /* Assume that the call will clobber all hard registers and
5492 all of memory. */
5493 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5494 || GET_CODE (x) == MEM)
5495 return 0;
5498 break;
5500 case RTL_EXPR:
5501 /* If a sequence exists, we would have to scan every instruction
5502 in the sequence to see if it was safe. This is probably not
5503 worthwhile. */
5504 if (RTL_EXPR_SEQUENCE (exp))
5505 return 0;
5507 exp_rtl = RTL_EXPR_RTL (exp);
5508 break;
5510 case WITH_CLEANUP_EXPR:
5511 exp_rtl = RTL_EXPR_RTL (exp);
5512 break;
5514 case CLEANUP_POINT_EXPR:
5515 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5517 case SAVE_EXPR:
5518 exp_rtl = SAVE_EXPR_RTL (exp);
5519 if (exp_rtl)
5520 break;
5522 /* This SAVE_EXPR might appear many times in the top-level
5523 safe_from_p() expression, and if it has a complex
5524 subexpression, examining it multiple times could result
5525 in a combinatorial explosion. E.g. on an Alpha
5526 running at least 200MHz, a Fortran test case compiled with
5527 optimization took about 28 minutes to compile -- even though
5528 it was only a few lines long, and the complicated line causing
5529 so much time to be spent in the earlier version of safe_from_p()
5530 had only 293 or so unique nodes.
5532 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5533 where it is so we can turn it back in the top-level safe_from_p()
5534 when we're done. */
5536 /* For now, don't bother re-sizing the array. */
5537 if (save_expr_count >= save_expr_size)
5538 return 0;
5539 save_expr_rewritten[save_expr_count++] = exp;
5541 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5542 for (i = 0; i < nops; i++)
5544 tree operand = TREE_OPERAND (exp, i);
5545 if (operand == NULL_TREE)
5546 continue;
5547 TREE_SET_CODE (exp, ERROR_MARK);
5548 if (!safe_from_p (x, operand, 0))
5549 return 0;
5550 TREE_SET_CODE (exp, SAVE_EXPR);
5552 TREE_SET_CODE (exp, ERROR_MARK);
5553 return 1;
5555 case BIND_EXPR:
5556 /* The only operand we look at is operand 1. The rest aren't
5557 part of the expression. */
5558 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5560 case METHOD_CALL_EXPR:
5561 /* This takes a rtx argument, but shouldn't appear here. */
5562 abort ();
5564 default:
5565 break;
5568 /* If we have an rtx, we do not need to scan our operands. */
5569 if (exp_rtl)
5570 break;
5572 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5573 for (i = 0; i < nops; i++)
5574 if (TREE_OPERAND (exp, i) != 0
5575 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5576 return 0;
5579 /* If we have an rtl, find any enclosed object. Then see if we conflict
5580 with it. */
5581 if (exp_rtl)
5583 if (GET_CODE (exp_rtl) == SUBREG)
5585 exp_rtl = SUBREG_REG (exp_rtl);
5586 if (GET_CODE (exp_rtl) == REG
5587 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5588 return 0;
5591 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5592 are memory and EXP is not readonly. */
5593 return ! (rtx_equal_p (x, exp_rtl)
5594 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5595 && ! TREE_READONLY (exp)));
5598 /* If we reach here, it is safe. */
5599 return 1;
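/* For illustration, a typical use of safe_from_p by the expanders later
   in this file is a guard of the form

	if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	  subtarget = 0;

   so that operand 0 is expanded into SUBTARGET only when doing so
   cannot clobber anything operand 1 still needs.  */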
5602 /* Subroutine of expand_expr: return nonzero iff EXP is an
5603 expression whose type is statically determinable. */
5605 static int
5606 fixed_type_p (exp)
5607 tree exp;
5609 if (TREE_CODE (exp) == PARM_DECL
5610 || TREE_CODE (exp) == VAR_DECL
5611 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5612 || TREE_CODE (exp) == COMPONENT_REF
5613 || TREE_CODE (exp) == ARRAY_REF)
5614 return 1;
5615 return 0;
5618 /* Subroutine of expand_expr: return rtx if EXP is a
5619 variable or parameter; else return 0. */
5621 static rtx
5622 var_rtx (exp)
5623 tree exp;
5625 STRIP_NOPS (exp);
5626 switch (TREE_CODE (exp))
5628 case PARM_DECL:
5629 case VAR_DECL:
5630 return DECL_RTL (exp);
5631 default:
5632 return 0;
5636 #ifdef MAX_INTEGER_COMPUTATION_MODE
5637 void
5638 check_max_integer_computation_mode (exp)
5639 tree exp;
5641 enum tree_code code;
5642 enum machine_mode mode;
5644 /* Strip any NOPs that don't change the mode. */
5645 STRIP_NOPS (exp);
5646 code = TREE_CODE (exp);
5648 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5649 if (code == NOP_EXPR
5650 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5651 return;
5653 /* First check the type of the overall operation. We need only look at
5654 unary, binary and relational operations. */
5655 if (TREE_CODE_CLASS (code) == '1'
5656 || TREE_CODE_CLASS (code) == '2'
5657 || TREE_CODE_CLASS (code) == '<')
5659 mode = TYPE_MODE (TREE_TYPE (exp));
5660 if (GET_MODE_CLASS (mode) == MODE_INT
5661 && mode > MAX_INTEGER_COMPUTATION_MODE)
5662 fatal ("unsupported wide integer operation");
5665 /* Check operand of a unary op. */
5666 if (TREE_CODE_CLASS (code) == '1')
5668 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5669 if (GET_MODE_CLASS (mode) == MODE_INT
5670 && mode > MAX_INTEGER_COMPUTATION_MODE)
5671 fatal ("unsupported wide integer operation");
5674 /* Check operands of a binary/comparison op. */
5675 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5677 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5678 if (GET_MODE_CLASS (mode) == MODE_INT
5679 && mode > MAX_INTEGER_COMPUTATION_MODE)
5680 fatal ("unsupported wide integer operation");
5682 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5683 if (GET_MODE_CLASS (mode) == MODE_INT
5684 && mode > MAX_INTEGER_COMPUTATION_MODE)
5685 fatal ("unsupported wide integer operation");
5688 #endif
5690 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5691 has any readonly fields. If any of the fields have types that
5692 contain readonly fields, return true as well. */
5694 static int
5695 readonly_fields_p (type)
5696 tree type;
5698 tree field;
5700 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5701 if (TREE_CODE (field) == FIELD_DECL
5702 && (TREE_READONLY (field)
5703 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5704 && readonly_fields_p (TREE_TYPE (field)))))
5705 return 1;
5707 return 0;
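/* For illustration: given a declaration such as

	struct s { const int id; char *name; };

   the FIELD_DECL for `id' has TREE_READONLY set, so readonly_fields_p
   returns 1 for the RECORD_TYPE of `struct s'; it likewise returns 1
   if a field's own record type contains such a field.  */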
5710 /* expand_expr: generate code for computing expression EXP.
5711 An rtx for the computed value is returned. The value is never null.
5712 In the case of a void EXP, const0_rtx is returned.
5714 The value may be stored in TARGET if TARGET is nonzero.
5715 TARGET is just a suggestion; callers must assume that
5716 the rtx returned may not be the same as TARGET.
5718 If TARGET is CONST0_RTX, it means that the value will be ignored.
5720 If TMODE is not VOIDmode, it suggests generating the
5721 result in mode TMODE. But this is done only when convenient.
5722 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5723 TMODE is just a suggestion; callers must assume that
5724 the rtx returned may not have mode TMODE.
5726 Note that TARGET may have neither TMODE nor MODE. In that case, it
5727 probably will not be used.
5729 If MODIFIER is EXPAND_SUM then when EXP is an addition
5730 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5731 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5732 products as above, or REG or MEM, or constant.
5733 Ordinarily in such cases we would output mul or add instructions
5734 and then return a pseudo reg containing the sum.
5736 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5737 it also marks a label as absolutely required (it can't be dead).
5738 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5739 This is used for outputting expressions used in initializers.
5741 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5742 with a constant address even if that address is not normally legitimate.
5743 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
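/* For illustration (a sketch, assuming a 32-bit target): under
   EXPAND_SUM the address arithmetic for an indexed reference may come
   back unreduced, for example as

	(plus:SI (mult:SI (reg:SI 58) (const_int 4))
		 (reg:SI 57))

   rather than as a single pseudo holding the sum; the register numbers
   here are made up.  Under EXPAND_NORMAL the multiply and add insns
   would be emitted and a pseudo returned instead.  */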
5746 expand_expr (exp, target, tmode, modifier)
5747 register tree exp;
5748 rtx target;
5749 enum machine_mode tmode;
5750 enum expand_modifier modifier;
5752 register rtx op0, op1, temp;
5753 tree type = TREE_TYPE (exp);
5754 int unsignedp = TREE_UNSIGNED (type);
5755 register enum machine_mode mode;
5756 register enum tree_code code = TREE_CODE (exp);
5757 optab this_optab;
5758 rtx subtarget, original_target;
5759 int ignore;
5760 tree context;
5761 /* Used by check-memory-usage to make modifier read only. */
5762 enum expand_modifier ro_modifier;
5764 /* Handle ERROR_MARK before anybody tries to access its type. */
5765 if (TREE_CODE (exp) == ERROR_MARK)
5767 op0 = CONST0_RTX (tmode);
5768 if (op0 != 0)
5769 return op0;
5770 return const0_rtx;
5773 mode = TYPE_MODE (type);
5774 /* Use subtarget as the target for operand 0 of a binary operation. */
5775 subtarget = get_subtarget (target);
5776 original_target = target;
5777 ignore = (target == const0_rtx
5778 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5779 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5780 || code == COND_EXPR)
5781 && TREE_CODE (type) == VOID_TYPE));
5783 /* Make a read-only version of the modifier. */
5784 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5785 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5786 ro_modifier = modifier;
5787 else
5788 ro_modifier = EXPAND_NORMAL;
5790 /* If we are going to ignore this result, we need only do something
5791 if there is a side-effect somewhere in the expression. If there
5792 is, short-circuit the most common cases here. Note that we must
5793 not call expand_expr with anything but const0_rtx in case this
5794 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
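/* For illustration: expanding the C statement `x + 1;' with its value
   ignored and no side effects present returns const0_rtx immediately,
   while `*vp;' with a volatile-qualified *vp still expands the memory
   reference below (copying it into a register) so the volatile access
   is not lost.  */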
5796 if (ignore)
5798 if (! TREE_SIDE_EFFECTS (exp))
5799 return const0_rtx;
5801 /* Ensure we reference a volatile object even if value is ignored, but
5802 don't do this if all we are doing is taking its address. */
5803 if (TREE_THIS_VOLATILE (exp)
5804 && TREE_CODE (exp) != FUNCTION_DECL
5805 && mode != VOIDmode && mode != BLKmode
5806 && modifier != EXPAND_CONST_ADDRESS)
5808 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5809 if (GET_CODE (temp) == MEM)
5810 temp = copy_to_reg (temp);
5811 return const0_rtx;
5814 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5815 || code == INDIRECT_REF || code == BUFFER_REF)
5816 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5817 VOIDmode, ro_modifier);
5818 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5819 || code == ARRAY_REF)
5821 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5822 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5823 return const0_rtx;
5825 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5826 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5827 /* If the second operand has no side effects, just evaluate
5828 the first. */
5829 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5830 VOIDmode, ro_modifier);
5831 else if (code == BIT_FIELD_REF)
5833 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5834 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5835 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5836 return const0_rtx;
5839 target = 0;
5842 #ifdef MAX_INTEGER_COMPUTATION_MODE
5843 /* Only check stuff here if the mode we want is different from the mode
5844 of the expression; if it's the same, check_max_integer_computation_mode
5845 will handle it. Do we really need to check this stuff at all? */
5847 if (target
5848 && GET_MODE (target) != mode
5849 && TREE_CODE (exp) != INTEGER_CST
5850 && TREE_CODE (exp) != PARM_DECL
5851 && TREE_CODE (exp) != ARRAY_REF
5852 && TREE_CODE (exp) != COMPONENT_REF
5853 && TREE_CODE (exp) != BIT_FIELD_REF
5854 && TREE_CODE (exp) != INDIRECT_REF
5855 && TREE_CODE (exp) != CALL_EXPR
5856 && TREE_CODE (exp) != VAR_DECL
5857 && TREE_CODE (exp) != RTL_EXPR)
5859 enum machine_mode mode = GET_MODE (target);
5861 if (GET_MODE_CLASS (mode) == MODE_INT
5862 && mode > MAX_INTEGER_COMPUTATION_MODE)
5863 fatal ("unsupported wide integer operation");
5866 if (tmode != mode
5867 && TREE_CODE (exp) != INTEGER_CST
5868 && TREE_CODE (exp) != PARM_DECL
5869 && TREE_CODE (exp) != ARRAY_REF
5870 && TREE_CODE (exp) != COMPONENT_REF
5871 && TREE_CODE (exp) != BIT_FIELD_REF
5872 && TREE_CODE (exp) != INDIRECT_REF
5873 && TREE_CODE (exp) != VAR_DECL
5874 && TREE_CODE (exp) != CALL_EXPR
5875 && TREE_CODE (exp) != RTL_EXPR
5876 && GET_MODE_CLASS (tmode) == MODE_INT
5877 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5878 fatal ("unsupported wide integer operation");
5880 check_max_integer_computation_mode (exp);
5881 #endif
5883 /* If we will do cse, generate all results into pseudo registers
5884 since 1) that allows cse to find more things
5885 and 2) otherwise cse could produce an insn the machine
5886 cannot support. */
5888 if (! cse_not_expected && mode != BLKmode && target
5889 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5890 target = subtarget;
5892 switch (code)
5894 case LABEL_DECL:
5896 tree function = decl_function_context (exp);
5897 /* Handle using a label in a containing function. */
5898 if (function != current_function_decl
5899 && function != inline_function_decl && function != 0)
5901 struct function *p = find_function_data (function);
5902 /* Allocate in the memory associated with the function
5903 that the label is in. */
5904 push_obstacks (p->function_obstack,
5905 p->function_maybepermanent_obstack);
5907 p->expr->x_forced_labels
5908 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5909 p->expr->x_forced_labels);
5910 pop_obstacks ();
5912 else
5914 if (modifier == EXPAND_INITIALIZER)
5915 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5916 label_rtx (exp),
5917 forced_labels);
5920 temp = gen_rtx_MEM (FUNCTION_MODE,
5921 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5922 if (function != current_function_decl
5923 && function != inline_function_decl && function != 0)
5924 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5925 return temp;
5928 case PARM_DECL:
5929 if (DECL_RTL (exp) == 0)
5931 error_with_decl (exp, "prior parameter's size depends on `%s'");
5932 return CONST0_RTX (mode);
5935 /* ... fall through ... */
5937 case VAR_DECL:
5938 /* If a static var's type was incomplete when the decl was written,
5939 but the type is complete now, lay out the decl now. */
5940 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5941 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5943 push_obstacks_nochange ();
5944 end_temporary_allocation ();
5945 layout_decl (exp, 0);
5946 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5947 pop_obstacks ();
5950 /* Although static-storage variables start off initialized, according to
5951 ANSI C, a memcpy could overwrite them with uninitialized values. So
5952 we check them too. This also lets us check for read-only variables
5953 accessed via a non-const declaration, in case it won't be detected
5954 any other way (e.g., in an embedded system or OS kernel without
5955 memory protection).
5957 Aggregates are not checked here; they're handled elsewhere. */
5958 if (cfun && current_function_check_memory_usage
5959 && code == VAR_DECL
5960 && GET_CODE (DECL_RTL (exp)) == MEM
5961 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5963 enum memory_use_mode memory_usage;
5964 memory_usage = get_memory_usage_from_modifier (modifier);
5966 in_check_memory_usage = 1;
5967 if (memory_usage != MEMORY_USE_DONT)
5968 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5969 XEXP (DECL_RTL (exp), 0), Pmode,
5970 GEN_INT (int_size_in_bytes (type)),
5971 TYPE_MODE (sizetype),
5972 GEN_INT (memory_usage),
5973 TYPE_MODE (integer_type_node));
5974 in_check_memory_usage = 0;
5977 /* ... fall through ... */
5979 case FUNCTION_DECL:
5980 case RESULT_DECL:
5981 if (DECL_RTL (exp) == 0)
5982 abort ();
5984 /* Ensure the variable is marked as used even if it doesn't go through
5985 a parser. If it hasn't been used yet, write out an external
5986 definition. */
5987 if (! TREE_USED (exp))
5989 assemble_external (exp);
5990 TREE_USED (exp) = 1;
5993 /* Show we haven't gotten RTL for this yet. */
5994 temp = 0;
5996 /* Handle variables inherited from containing functions. */
5997 context = decl_function_context (exp);
5999 /* We treat inline_function_decl as an alias for the current function
6000 because that is the inline function whose vars, types, etc.
6001 are being merged into the current function.
6002 See expand_inline_function. */
6004 if (context != 0 && context != current_function_decl
6005 && context != inline_function_decl
6006 /* If var is static, we don't need a static chain to access it. */
6007 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6008 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6010 rtx addr;
6012 /* Mark as non-local and addressable. */
6013 DECL_NONLOCAL (exp) = 1;
6014 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6015 abort ();
6016 mark_addressable (exp);
6017 if (GET_CODE (DECL_RTL (exp)) != MEM)
6018 abort ();
6019 addr = XEXP (DECL_RTL (exp), 0);
6020 if (GET_CODE (addr) == MEM)
6021 addr = change_address (addr, Pmode,
6022 fix_lexical_addr (XEXP (addr, 0), exp));
6023 else
6024 addr = fix_lexical_addr (addr, exp);
6026 temp = change_address (DECL_RTL (exp), mode, addr);
6029 /* This is the case of an array whose size is to be determined
6030 from its initializer, while the initializer is still being parsed.
6031 See expand_decl. */
6033 else if (GET_CODE (DECL_RTL (exp)) == MEM
6034 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6035 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6036 XEXP (DECL_RTL (exp), 0));
6038 /* If DECL_RTL is memory, we are in the normal case: if either
6039 the address is not valid, or it is not a register and -fforce-addr
6040 is specified, get the address into a register. */
6042 else if (GET_CODE (DECL_RTL (exp)) == MEM
6043 && modifier != EXPAND_CONST_ADDRESS
6044 && modifier != EXPAND_SUM
6045 && modifier != EXPAND_INITIALIZER
6046 && (! memory_address_p (DECL_MODE (exp),
6047 XEXP (DECL_RTL (exp), 0))
6048 || (flag_force_addr
6049 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6050 temp = change_address (DECL_RTL (exp), VOIDmode,
6051 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6053 /* If we got something, return it. But first, set the alignment
6054 if the address is a register. */
6055 if (temp != 0)
6057 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6058 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6060 return temp;
6063 /* If the mode of DECL_RTL does not match that of the decl, it
6064 must be a promoted value. We return a SUBREG of the wanted mode,
6065 but mark it so that we know that it was already extended. */
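/* For illustration (a sketch, assuming a target that promotes QImode
   variables to SImode registers): DECL_RTL might be (reg:SI 60) while
   the declared mode is QImode; the code below then returns
   (subreg:QI (reg:SI 60) 0) with SUBREG_PROMOTED_VAR_P set, so later
   users know the value has already been extended.  */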
6067 if (GET_CODE (DECL_RTL (exp)) == REG
6068 && GET_MODE (DECL_RTL (exp)) != mode)
6070 /* Get the signedness used for this variable. Ensure we get the
6071 same mode we got when the variable was declared. */
6072 if (GET_MODE (DECL_RTL (exp))
6073 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6074 abort ();
6076 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6077 SUBREG_PROMOTED_VAR_P (temp) = 1;
6078 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6079 return temp;
6082 return DECL_RTL (exp);
6084 case INTEGER_CST:
6085 return immed_double_const (TREE_INT_CST_LOW (exp),
6086 TREE_INT_CST_HIGH (exp), mode);
6088 case CONST_DECL:
6089 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6090 EXPAND_MEMORY_USE_BAD);
6092 case REAL_CST:
6093 /* If optimized, generate immediate CONST_DOUBLE
6094 which will be turned into memory by reload if necessary.
6096 We used to force a register so that loop.c could see it. But
6097 this does not allow gen_* patterns to perform optimizations with
6098 the constants. It also produces two insns in cases like "x = 1.0;".
6099 On most machines, floating-point constants are not permitted in
6100 many insns, so we'd end up copying it to a register in any case.
6102 Now, we do the copying in expand_binop, if appropriate. */
6103 return immed_real_const (exp);
6105 case COMPLEX_CST:
6106 case STRING_CST:
6107 if (! TREE_CST_RTL (exp))
6108 output_constant_def (exp);
6110 /* TREE_CST_RTL probably contains a constant address.
6111 On RISC machines where a constant address isn't valid,
6112 make some insns to get that address into a register. */
6113 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6114 && modifier != EXPAND_CONST_ADDRESS
6115 && modifier != EXPAND_INITIALIZER
6116 && modifier != EXPAND_SUM
6117 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6118 || (flag_force_addr
6119 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6120 return change_address (TREE_CST_RTL (exp), VOIDmode,
6121 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6122 return TREE_CST_RTL (exp);
6124 case EXPR_WITH_FILE_LOCATION:
6126 rtx to_return;
6127 const char *saved_input_filename = input_filename;
6128 int saved_lineno = lineno;
6129 input_filename = EXPR_WFL_FILENAME (exp);
6130 lineno = EXPR_WFL_LINENO (exp);
6131 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6132 emit_line_note (input_filename, lineno);
6133 /* Possibly avoid switching back and forth here. */
6134 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6135 input_filename = saved_input_filename;
6136 lineno = saved_lineno;
6137 return to_return;
6140 case SAVE_EXPR:
6141 context = decl_function_context (exp);
6143 /* If this SAVE_EXPR was at global context, assume we are an
6144 initialization function and move it into our context. */
6145 if (context == 0)
6146 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6148 /* We treat inline_function_decl as an alias for the current function
6149 because that is the inline function whose vars, types, etc.
6150 are being merged into the current function.
6151 See expand_inline_function. */
6152 if (context == current_function_decl || context == inline_function_decl)
6153 context = 0;
6155 /* If this is non-local, handle it. */
6156 if (context)
6158 /* The following call just exists to abort if the context is
6159 not of a containing function. */
6160 find_function_data (context);
6162 temp = SAVE_EXPR_RTL (exp);
6163 if (temp && GET_CODE (temp) == REG)
6165 put_var_into_stack (exp);
6166 temp = SAVE_EXPR_RTL (exp);
6168 if (temp == 0 || GET_CODE (temp) != MEM)
6169 abort ();
6170 return change_address (temp, mode,
6171 fix_lexical_addr (XEXP (temp, 0), exp));
6173 if (SAVE_EXPR_RTL (exp) == 0)
6175 if (mode == VOIDmode)
6176 temp = const0_rtx;
6177 else
6178 temp = assign_temp (type, 3, 0, 0);
6180 SAVE_EXPR_RTL (exp) = temp;
6181 if (!optimize && GET_CODE (temp) == REG)
6182 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6183 save_expr_regs);
6185 /* If the mode of TEMP does not match that of the expression, it
6186 must be a promoted value. We pass store_expr a SUBREG of the
6187 wanted mode but mark it so that we know that it was already
6188 extended. Note that `unsignedp' was modified above in
6189 this case. */
6191 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6193 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6194 SUBREG_PROMOTED_VAR_P (temp) = 1;
6195 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6198 if (temp == const0_rtx)
6199 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6200 EXPAND_MEMORY_USE_BAD);
6201 else
6202 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6204 TREE_USED (exp) = 1;
6207 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6208 must be a promoted value. We return a SUBREG of the wanted mode,
6209 but mark it so that we know that it was already extended. */
6211 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6212 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6214 /* Compute the signedness and make the proper SUBREG. */
6215 promote_mode (type, mode, &unsignedp, 0);
6216 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6217 SUBREG_PROMOTED_VAR_P (temp) = 1;
6218 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6219 return temp;
6222 return SAVE_EXPR_RTL (exp);
6224 case UNSAVE_EXPR:
6226 rtx temp;
6227 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6228 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6229 return temp;
6232 case PLACEHOLDER_EXPR:
6234 tree placeholder_expr;
6236 /* If there is an object on the head of the placeholder list,
6237 see if some object in it is of type TYPE or a pointer to it. For
6238 further information, see tree.def. */
6239 for (placeholder_expr = placeholder_list;
6240 placeholder_expr != 0;
6241 placeholder_expr = TREE_CHAIN (placeholder_expr))
6243 tree need_type = TYPE_MAIN_VARIANT (type);
6244 tree object = 0;
6245 tree old_list = placeholder_list;
6246 tree elt;
6248 /* Find the outermost reference that is of the type we want.
6249 If none, see if any object has a type that is a pointer to
6250 the type we want. */
6251 for (elt = TREE_PURPOSE (placeholder_expr);
6252 elt != 0 && object == 0;
6254 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6255 || TREE_CODE (elt) == COND_EXPR)
6256 ? TREE_OPERAND (elt, 1)
6257 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6258 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6259 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6260 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6261 ? TREE_OPERAND (elt, 0) : 0))
6262 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6263 object = elt;
6265 for (elt = TREE_PURPOSE (placeholder_expr);
6266 elt != 0 && object == 0;
6268 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6269 || TREE_CODE (elt) == COND_EXPR)
6270 ? TREE_OPERAND (elt, 1)
6271 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6272 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6273 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6274 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6275 ? TREE_OPERAND (elt, 0) : 0))
6276 if (POINTER_TYPE_P (TREE_TYPE (elt))
6277 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6278 == need_type))
6279 object = build1 (INDIRECT_REF, need_type, elt);
6281 if (object != 0)
6283 /* Expand this object skipping the list entries before
6284 it was found in case it is also a PLACEHOLDER_EXPR.
6285 In that case, we want to translate it using subsequent
6286 entries. */
6287 placeholder_list = TREE_CHAIN (placeholder_expr);
6288 temp = expand_expr (object, original_target, tmode,
6289 ro_modifier);
6290 placeholder_list = old_list;
6291 return temp;
6296 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6297 abort ();
6299 case WITH_RECORD_EXPR:
6300 /* Put the object on the placeholder list, expand our first operand,
6301 and pop the list. */
6302 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6303 placeholder_list);
6304 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6305 tmode, ro_modifier);
6306 placeholder_list = TREE_CHAIN (placeholder_list);
6307 return target;
6309 case GOTO_EXPR:
6310 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6311 expand_goto (TREE_OPERAND (exp, 0));
6312 else
6313 expand_computed_goto (TREE_OPERAND (exp, 0));
6314 return const0_rtx;
6316 case EXIT_EXPR:
6317 expand_exit_loop_if_false (NULL_PTR,
6318 invert_truthvalue (TREE_OPERAND (exp, 0)));
6319 return const0_rtx;
6321 case LABELED_BLOCK_EXPR:
6322 if (LABELED_BLOCK_BODY (exp))
6323 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6324 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6325 return const0_rtx;
6327 case EXIT_BLOCK_EXPR:
6328 if (EXIT_BLOCK_RETURN (exp))
6329 sorry ("returned value in block_exit_expr");
6330 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6331 return const0_rtx;
6333 case LOOP_EXPR:
6334 push_temp_slots ();
6335 expand_start_loop (1);
6336 expand_expr_stmt (TREE_OPERAND (exp, 0));
6337 expand_end_loop ();
6338 pop_temp_slots ();
6340 return const0_rtx;
6342 case BIND_EXPR:
6344 tree vars = TREE_OPERAND (exp, 0);
6345 int vars_need_expansion = 0;
6347 /* Need to open a binding contour here because
6348 if there are any cleanups they must be contained here. */
6349 expand_start_bindings (2);
6351 /* Mark the corresponding BLOCK for output in its proper place. */
6352 if (TREE_OPERAND (exp, 2) != 0
6353 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6354 insert_block (TREE_OPERAND (exp, 2));
6356 /* If VARS have not yet been expanded, expand them now. */
6357 while (vars)
6359 if (DECL_RTL (vars) == 0)
6361 vars_need_expansion = 1;
6362 expand_decl (vars);
6364 expand_decl_init (vars);
6365 vars = TREE_CHAIN (vars);
6368 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6370 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6372 return temp;
6375 case RTL_EXPR:
6376 if (RTL_EXPR_SEQUENCE (exp))
6378 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6379 abort ();
6380 emit_insns (RTL_EXPR_SEQUENCE (exp));
6381 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6383 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6384 free_temps_for_rtl_expr (exp);
6385 return RTL_EXPR_RTL (exp);
6387 case CONSTRUCTOR:
6388 /* If we don't need the result, just ensure we evaluate any
6389 subexpressions. */
6390 if (ignore)
6392 tree elt;
6393 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6394 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6395 EXPAND_MEMORY_USE_BAD);
6396 return const0_rtx;
6399 /* All elts simple constants => refer to a constant in memory. But
6400 if this is a non-BLKmode mode, let it store a field at a time
6401 since that should make a CONST_INT or CONST_DOUBLE when we
6402 fold. Likewise, if we have a target we can use, it is best to
6403 store directly into the target unless the type is large enough
6404 that memcpy will be used. If we are making an initializer and
6405 all operands are constant, put it in memory as well. */
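/* For illustration: a TREE_STATIC constructor for, say, a BLKmode
   `int a[4] = { 1, 2, 3, 4 };' with no usable target takes the branch
   below and is referenced as a constant in memory, while a small
   constructor whose type has (say) HImode falls through to the
   field-by-field store so folding can produce a CONST_INT.  */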
6406 else if ((TREE_STATIC (exp)
6407 && ((mode == BLKmode
6408 && ! (target != 0 && safe_from_p (target, exp, 1)))
6409 || TREE_ADDRESSABLE (exp)
6410 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6411 && (! MOVE_BY_PIECES_P
6412 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6413 TYPE_ALIGN (type)))
6414 && ! mostly_zeros_p (exp))))
6415 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6417 rtx constructor = output_constant_def (exp);
6419 if (modifier != EXPAND_CONST_ADDRESS
6420 && modifier != EXPAND_INITIALIZER
6421 && modifier != EXPAND_SUM
6422 && (! memory_address_p (GET_MODE (constructor),
6423 XEXP (constructor, 0))
6424 || (flag_force_addr
6425 && GET_CODE (XEXP (constructor, 0)) != REG)))
6426 constructor = change_address (constructor, VOIDmode,
6427 XEXP (constructor, 0));
6428 return constructor;
6431 else
6433 /* Handle calls that pass values in multiple non-contiguous
6434 locations. The Irix 6 ABI has examples of this. */
6435 if (target == 0 || ! safe_from_p (target, exp, 1)
6436 || GET_CODE (target) == PARALLEL)
6438 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6439 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6440 else
6441 target = assign_temp (type, 0, 1, 1);
6444 if (TREE_READONLY (exp))
6446 if (GET_CODE (target) == MEM)
6447 target = copy_rtx (target);
6449 RTX_UNCHANGING_P (target) = 1;
6452 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6453 int_size_in_bytes (TREE_TYPE (exp)));
6454 return target;
6457 case INDIRECT_REF:
6459 tree exp1 = TREE_OPERAND (exp, 0);
6460 tree index;
6461 tree string = string_constant (exp1, &index);
6463 /* Try to optimize reads from const strings. */
6464 if (string
6465 && TREE_CODE (string) == STRING_CST
6466 && TREE_CODE (index) == INTEGER_CST
6467 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6468 && GET_MODE_CLASS (mode) == MODE_INT
6469 && GET_MODE_SIZE (mode) == 1
6470 && modifier != EXPAND_MEMORY_USE_WO)
6471 return
6472 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6474 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6475 op0 = memory_address (mode, op0);
6477 if (cfun && current_function_check_memory_usage
6478 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6480 enum memory_use_mode memory_usage;
6481 memory_usage = get_memory_usage_from_modifier (modifier);
6483 if (memory_usage != MEMORY_USE_DONT)
6485 in_check_memory_usage = 1;
6486 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6487 op0, Pmode,
6488 GEN_INT (int_size_in_bytes (type)),
6489 TYPE_MODE (sizetype),
6490 GEN_INT (memory_usage),
6491 TYPE_MODE (integer_type_node));
6492 in_check_memory_usage = 0;
6496 temp = gen_rtx_MEM (mode, op0);
6497 set_mem_attributes (temp, exp, 0);
6499 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6500 here, because, in C and C++, the fact that a location is accessed
6501 through a pointer to const does not mean that the value there can
6502 never change. Languages where it can never change should
6503 also set TREE_STATIC. */
6504 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6506 /* If we are writing to this object and its type is a record with
6507 readonly fields, we must mark it as readonly so it will
6508 conflict with readonly references to those fields. */
6509 if (modifier == EXPAND_MEMORY_USE_WO
6510 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6511 RTX_UNCHANGING_P (temp) = 1;
6513 return temp;
6516 case ARRAY_REF:
6517 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6518 abort ();
6521 tree array = TREE_OPERAND (exp, 0);
6522 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6523 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6524 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6525 HOST_WIDE_INT i;
6527 /* Optimize the special-case of a zero lower bound.
6529 We convert the low_bound to sizetype to avoid some problems
6530 with constant folding. (E.g. suppose the lower bound is 1,
6531 and its mode is QI. Without the conversion, (ARRAY
6532 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6533 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6535 if (! integer_zerop (low_bound))
6536 index = size_diffop (index, convert (sizetype, low_bound));
6538 /* Fold an expression like: "foo"[2].
6539 This is not done in fold so it won't happen inside &.
6540 Don't fold if this is for wide characters since it's too
6541 difficult to do correctly and this is a very rare case. */
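/* For illustration: "foo"[2] satisfies the test below and is returned
   directly as GEN_INT ('o'), with no memory reference emitted.  */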
6543 if (TREE_CODE (array) == STRING_CST
6544 && TREE_CODE (index) == INTEGER_CST
6545 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6546 && GET_MODE_CLASS (mode) == MODE_INT
6547 && GET_MODE_SIZE (mode) == 1)
6548 return
6549 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6551 /* If this is a constant index into a constant array,
6552 just get the value from the array. Handle both the cases when
6553 we have an explicit constructor and when our operand is a variable
6554 that was declared const. */
6556 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6557 && TREE_CODE (index) == INTEGER_CST
6558 && 0 > compare_tree_int (index,
6559 list_length (CONSTRUCTOR_ELTS
6560 (TREE_OPERAND (exp, 0)))))
6562 tree elem;
6564 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6565 i = TREE_INT_CST_LOW (index);
6566 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6569 if (elem)
6570 return expand_expr (fold (TREE_VALUE (elem)), target,
6571 tmode, ro_modifier);
6574 else if (optimize >= 1
6575 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6576 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6577 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6579 if (TREE_CODE (index) == INTEGER_CST)
6581 tree init = DECL_INITIAL (array);
6583 if (TREE_CODE (init) == CONSTRUCTOR)
6585 tree elem;
6587 for (elem = CONSTRUCTOR_ELTS (init);
6588 (elem
6589 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6590 elem = TREE_CHAIN (elem))
6593 if (elem)
6594 return expand_expr (fold (TREE_VALUE (elem)), target,
6595 tmode, ro_modifier);
6597 else if (TREE_CODE (init) == STRING_CST
6598 && 0 > compare_tree_int (index,
6599 TREE_STRING_LENGTH (init)))
6600 return (GEN_INT
6601 (TREE_STRING_POINTER
6602 (init)[TREE_INT_CST_LOW (index)]));
6606 /* Fall through. */
6608 case COMPONENT_REF:
6609 case BIT_FIELD_REF:
6610 /* If the operand is a CONSTRUCTOR, we can just extract the
6611 appropriate field if it is present. Don't do this if we have
6612 already written the data since we want to refer to that copy
6613 and varasm.c assumes that's what we'll do. */
6614 if (code != ARRAY_REF
6615 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6616 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6618 tree elt;
6620 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6621 elt = TREE_CHAIN (elt))
6622 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6623 /* We can normally use the value of the field in the
6624 CONSTRUCTOR. However, if this is a bitfield in
6625 an integral mode that we can fit in a HOST_WIDE_INT,
6626 we must mask only the number of bits in the bitfield,
6627 since this is done implicitly by the constructor. If
6628 the bitfield does not meet either of those conditions,
6629 we can't do this optimization. */
6630 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6631 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6632 == MODE_INT)
6633 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6634 <= HOST_BITS_PER_WIDE_INT))))
6636 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6637 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6639 HOST_WIDE_INT bitsize
6640 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6642 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6644 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6645 op0 = expand_and (op0, op1, target);
6647 else
6649 enum machine_mode imode
6650 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6651 tree count
6652 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
6655 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6656 target, 0);
6657 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6658 target, 0);
6662 return op0;
6667 enum machine_mode mode1;
6668 HOST_WIDE_INT bitsize, bitpos;
6669 tree offset;
6670 int volatilep = 0;
6671 unsigned int alignment;
6672 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6673 &mode1, &unsignedp, &volatilep,
6674 &alignment);
6676 /* If we got back the original object, something is wrong. Perhaps
6677 we are evaluating an expression too early. In any event, don't
6678 infinitely recurse. */
6679 if (tem == exp)
6680 abort ();
6682 /* If TEM's type is a union of variable size, pass TARGET to the inner
6683 computation, since it will need a temporary and TARGET is known
6684 to be usable as one. This occurs in unchecked conversion in Ada. */
6686 op0 = expand_expr (tem,
6687 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6688 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6689 != INTEGER_CST)
6690 ? target : NULL_RTX),
6691 VOIDmode,
6692 (modifier == EXPAND_INITIALIZER
6693 || modifier == EXPAND_CONST_ADDRESS)
6694 ? modifier : EXPAND_NORMAL);
6696 /* If this is a constant, put it into a register if it is a
6697 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6698 if (CONSTANT_P (op0))
6700 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6701 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6702 && offset == 0)
6703 op0 = force_reg (mode, op0);
6704 else
6705 op0 = validize_mem (force_const_mem (mode, op0));
6708 if (offset != 0)
6710 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6712 /* If this object is in a register, put it into memory.
6713 This case can't occur in C, but can in Ada if we have
6714 unchecked conversion of an expression from a scalar type to
6715 an array or record type. */
6716 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6717 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6719 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6721 mark_temp_addr_taken (memloc);
6722 emit_move_insn (memloc, op0);
6723 op0 = memloc;
6726 if (GET_CODE (op0) != MEM)
6727 abort ();
6729 if (GET_MODE (offset_rtx) != ptr_mode)
6731 #ifdef POINTERS_EXTEND_UNSIGNED
6732 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6733 #else
6734 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6735 #endif
6738 /* A constant address in OP0 can have VOIDmode; we must not try
6739 to call force_reg in that case, so avoid it. */
6740 if (GET_CODE (op0) == MEM
6741 && GET_MODE (op0) == BLKmode
6742 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6743 && bitsize != 0
6744 && (bitpos % bitsize) == 0
6745 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6746 && alignment == GET_MODE_ALIGNMENT (mode1))
6748 rtx temp = change_address (op0, mode1,
6749 plus_constant (XEXP (op0, 0),
6750 (bitpos /
6751 BITS_PER_UNIT)));
6752 if (GET_CODE (XEXP (temp, 0)) == REG)
6753 op0 = temp;
6754 else
6755 op0 = change_address (op0, mode1,
6756 force_reg (GET_MODE (XEXP (temp, 0)),
6757 XEXP (temp, 0)));
6758 bitpos = 0;
6761 op0 = change_address (op0, VOIDmode,
6762 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6763 force_reg (ptr_mode,
6764 offset_rtx)));
6767 /* Don't forget about volatility even if this is a bitfield. */
6768 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6770 op0 = copy_rtx (op0);
6771 MEM_VOLATILE_P (op0) = 1;
6774 /* Check the access. */
6775 if (cfun != 0 && current_function_check_memory_usage
6776 && GET_CODE (op0) == MEM)
6778 enum memory_use_mode memory_usage;
6779 memory_usage = get_memory_usage_from_modifier (modifier);
6781 if (memory_usage != MEMORY_USE_DONT)
6783 rtx to;
6784 int size;
6786 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6787 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6789 /* Check the access right of the pointer. */
6790 in_check_memory_usage = 1;
6791 if (size > BITS_PER_UNIT)
6792 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6793 to, Pmode,
6794 GEN_INT (size / BITS_PER_UNIT),
6795 TYPE_MODE (sizetype),
6796 GEN_INT (memory_usage),
6797 TYPE_MODE (integer_type_node));
6798 in_check_memory_usage = 0;
6802 /* In cases where an aligned union has an unaligned object
6803 as a field, we might be extracting a BLKmode value from
6804 an integer-mode (e.g., SImode) object. Handle this case
6805 by doing the extract into an object as wide as the field
6806 (which we know to be the width of a basic mode), then
6807 storing into memory, and changing the mode to BLKmode.
6808 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6809 EXPAND_INITIALIZER), then we must not copy to a temporary. */
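/* For illustration (a sketch): a BLKmode field packed inside an SImode
   union member is fetched below with extract_bit_field into an
   integer-mode temporary wide enough for the field, spilled to a stack
   slot, and the slot's mode is then changed to BLKmode so the caller
   sees an ordinary memory object.  */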
6810 if (mode1 == VOIDmode
6811 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6812 || (modifier != EXPAND_CONST_ADDRESS
6813 && modifier != EXPAND_INITIALIZER
6814 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6815 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6816 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6817 /* If the field isn't aligned enough to fetch as a memref,
6818 fetch it as a bit field. */
6819 || (mode1 != BLKmode
6820 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6821 && ((TYPE_ALIGN (TREE_TYPE (tem))
6822 < GET_MODE_ALIGNMENT (mode))
6823 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6824 /* If the type and the field are a constant size and the
6825 size of the type isn't the same size as the bitfield,
6826 we must use bitfield operations. */
6827 || ((bitsize >= 0
6828 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6829 == INTEGER_CST)
6830 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6831 bitsize)))))
6832 || (modifier != EXPAND_CONST_ADDRESS
6833 && modifier != EXPAND_INITIALIZER
6834 && mode == BLKmode
6835 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6836 && (TYPE_ALIGN (type) > alignment
6837 || bitpos % TYPE_ALIGN (type) != 0)))
6839 enum machine_mode ext_mode = mode;
6841 if (ext_mode == BLKmode
6842 && ! (target != 0 && GET_CODE (op0) == MEM
6843 && GET_CODE (target) == MEM
6844 && bitpos % BITS_PER_UNIT == 0))
6845 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6847 if (ext_mode == BLKmode)
6849 /* In this case, BITPOS must start at a byte boundary and
6850 TARGET, if specified, must be a MEM. */
6851 if (GET_CODE (op0) != MEM
6852 || (target != 0 && GET_CODE (target) != MEM)
6853 || bitpos % BITS_PER_UNIT != 0)
6854 abort ();
6856 op0 = change_address (op0, VOIDmode,
6857 plus_constant (XEXP (op0, 0),
6858 bitpos / BITS_PER_UNIT));
6859 if (target == 0)
6860 target = assign_temp (type, 0, 1, 1);
6862 emit_block_move (target, op0,
6863 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6864 / BITS_PER_UNIT),
6865 BITS_PER_UNIT);
6867 return target;
6870 op0 = validize_mem (op0);
6872 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6873 mark_reg_pointer (XEXP (op0, 0), alignment);
6875 op0 = extract_bit_field (op0, bitsize, bitpos,
6876 unsignedp, target, ext_mode, ext_mode,
6877 alignment,
6878 int_size_in_bytes (TREE_TYPE (tem)));
6880 /* If the result is a record type and BITSIZE is narrower than
6881 the mode of OP0, an integral mode, and this is a big endian
6882 machine, we must put the field into the high-order bits. */
6883 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6884 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6885 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6886 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6887 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6888 - bitsize),
6889 op0, 1);
6891 if (mode == BLKmode)
6893 rtx new = assign_stack_temp (ext_mode,
6894 bitsize / BITS_PER_UNIT, 0);
6896 emit_move_insn (new, op0);
6897 op0 = copy_rtx (new);
6898 PUT_MODE (op0, BLKmode);
6899 MEM_SET_IN_STRUCT_P (op0, 1);
6902 return op0;
6905 /* If the result is BLKmode, use that to access the object
6906 now as well. */
6907 if (mode == BLKmode)
6908 mode1 = BLKmode;
6910 /* Get a reference to just this component. */
6911 if (modifier == EXPAND_CONST_ADDRESS
6912 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6913 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6914 (bitpos / BITS_PER_UNIT)));
6915 else
6916 op0 = change_address (op0, mode1,
6917 plus_constant (XEXP (op0, 0),
6918 (bitpos / BITS_PER_UNIT)));
6920 set_mem_attributes (op0, exp, 0);
6921 if (GET_CODE (XEXP (op0, 0)) == REG)
6922 mark_reg_pointer (XEXP (op0, 0), alignment);
6924 MEM_VOLATILE_P (op0) |= volatilep;
6925 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6926 || modifier == EXPAND_CONST_ADDRESS
6927 || modifier == EXPAND_INITIALIZER)
6928 return op0;
6929 else if (target == 0)
6930 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6932 convert_move (target, op0, unsignedp);
6933 return target;
6936 /* Intended for a reference to a buffer of a file-object in Pascal.
6937 But it's not certain that a special tree code will really be
6938 necessary for these. INDIRECT_REF might work for them. */
6939 case BUFFER_REF:
6940 abort ();
6942 case IN_EXPR:
6944 /* Pascal set IN expression.
6946 Algorithm:
6947 rlo = set_low - (set_low%bits_per_word);
6948 the_word = set [ (index - rlo)/bits_per_word ];
6949 bit_index = index % bits_per_word;
6950 bitmask = 1 << bit_index;
6951 return !!(the_word & bitmask); */
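/* A worked instance of the algorithm above, for illustration, with
   bits_per_word == 32, set_low == 40 and index == 75:
   rlo = 40 - (40 % 32) = 32; the_word = set[(75 - 32) / 32] = set[1];
   bit_index = 75 % 32 = 11; bitmask = 1 << 11; the result is
   !!(the_word & 0x800).  */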
6953 tree set = TREE_OPERAND (exp, 0);
6954 tree index = TREE_OPERAND (exp, 1);
6955 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6956 tree set_type = TREE_TYPE (set);
6957 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6958 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6959 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6960 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6961 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6962 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6963 rtx setaddr = XEXP (setval, 0);
6964 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6965 rtx rlow;
6966 rtx diff, quo, rem, addr, bit, result;
6968 preexpand_calls (exp);
6970 /* If domain is empty, answer is no. Likewise if index is constant
6971 and out of bounds. */
6972 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6973 && TREE_CODE (set_low_bound) == INTEGER_CST
6974 && tree_int_cst_lt (set_high_bound, set_low_bound))
6975 || (TREE_CODE (index) == INTEGER_CST
6976 && TREE_CODE (set_low_bound) == INTEGER_CST
6977 && tree_int_cst_lt (index, set_low_bound))
6978 || (TREE_CODE (set_high_bound) == INTEGER_CST
6979 && TREE_CODE (index) == INTEGER_CST
6980 && tree_int_cst_lt (set_high_bound, index))))
6981 return const0_rtx;
6983 if (target == 0)
6984 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6986 /* If we get here, we have to generate the code for both cases
6987 (in range and out of range). */
6989 op0 = gen_label_rtx ();
6990 op1 = gen_label_rtx ();
6992 if (! (GET_CODE (index_val) == CONST_INT
6993 && GET_CODE (lo_r) == CONST_INT))
6995 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6996 GET_MODE (index_val), iunsignedp, 0, op1);
6999 if (! (GET_CODE (index_val) == CONST_INT
7000 && GET_CODE (hi_r) == CONST_INT))
7002 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7003 GET_MODE (index_val), iunsignedp, 0, op1);
7006 /* Calculate the element number of bit zero in the first word
7007 of the set. */
7008 if (GET_CODE (lo_r) == CONST_INT)
7009 rlow = GEN_INT (INTVAL (lo_r)
7010 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7011 else
7012 rlow = expand_binop (index_mode, and_optab, lo_r,
7013 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7014 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7016 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7017 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7019 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7020 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7021 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7022 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7024 addr = memory_address (byte_mode,
7025 expand_binop (index_mode, add_optab, diff,
7026 setaddr, NULL_RTX, iunsignedp,
7027 OPTAB_LIB_WIDEN));
7029 /* Extract the bit we want to examine. */
7030 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7031 gen_rtx_MEM (byte_mode, addr),
7032 make_tree (TREE_TYPE (index), rem),
7033 NULL_RTX, 1);
7034 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7035 GET_MODE (target) == byte_mode ? target : 0,
7036 1, OPTAB_LIB_WIDEN);
7038 if (result != target)
7039 convert_move (target, result, 1);
7041 /* Output the code to handle the out-of-range case. */
7042 emit_jump (op0);
7043 emit_label (op1);
7044 emit_move_insn (target, const0_rtx);
7045 emit_label (op0);
7046 return target;
7049 case WITH_CLEANUP_EXPR:
7050 if (RTL_EXPR_RTL (exp) == 0)
7052 RTL_EXPR_RTL (exp)
7053 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7054 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7056 /* That's it for this cleanup. */
7057 TREE_OPERAND (exp, 2) = 0;
7059 return RTL_EXPR_RTL (exp);
7061 case CLEANUP_POINT_EXPR:
7063 /* Start a new binding layer that will keep track of all cleanup
7064 actions to be performed. */
7065 expand_start_bindings (2);
7067 target_temp_slot_level = temp_slot_level;
7069 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7070 /* If we're going to use this value, load it up now. */
7071 if (! ignore)
7072 op0 = force_not_mem (op0);
7073 preserve_temp_slots (op0);
7074 expand_end_bindings (NULL_TREE, 0, 0);
7076 return op0;
7078 case CALL_EXPR:
7079 /* Check for a built-in function. */
7080 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7081 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7082 == FUNCTION_DECL)
7083 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7084 return expand_builtin (exp, target, subtarget, tmode, ignore);
7086 /* If this call was expanded already by preexpand_calls,
7087 just return the result we got. */
7088 if (CALL_EXPR_RTL (exp) != 0)
7089 return CALL_EXPR_RTL (exp);
7091 return expand_call (exp, target, ignore);
7093 case NON_LVALUE_EXPR:
7094 case NOP_EXPR:
7095 case CONVERT_EXPR:
7096 case REFERENCE_EXPR:
7097 if (TREE_OPERAND (exp, 0) == error_mark_node)
7098 return const0_rtx;
7100 if (TREE_CODE (type) == UNION_TYPE)
7102 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7104 /* If both input and output are BLKmode, this conversion
7105 isn't actually doing anything unless we need to make the
7106 alignment stricter. */
7107 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7108 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7109 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7110 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7111 modifier);
7113 if (target == 0)
7115 if (mode != BLKmode)
7116 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7117 else
7118 target = assign_temp (type, 0, 1, 1);
7121 if (GET_CODE (target) == MEM)
7122 /* Store data into beginning of memory target. */
7123 store_expr (TREE_OPERAND (exp, 0),
7124 change_address (target, TYPE_MODE (valtype), 0), 0);
7126 else if (GET_CODE (target) == REG)
7127 /* Store this field into a union of the proper type. */
7128 store_field (target,
7129 MIN ((int_size_in_bytes (TREE_TYPE
7130 (TREE_OPERAND (exp, 0)))
7131 * BITS_PER_UNIT),
7132 GET_MODE_BITSIZE (mode)),
7133 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7134 VOIDmode, 0, BITS_PER_UNIT,
7135 int_size_in_bytes (type), 0);
7136 else
7137 abort ();
7139 /* Return the entire union. */
7140 return target;
7143 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7145 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7146 ro_modifier);
7148 /* If the signedness of the conversion differs and OP0 is
7149 a promoted SUBREG, clear that indication since we now
7150 have to do the proper extension. */
7151 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7152 && GET_CODE (op0) == SUBREG)
7153 SUBREG_PROMOTED_VAR_P (op0) = 0;
7155 return op0;
7158 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7159 if (GET_MODE (op0) == mode)
7160 return op0;
7162 /* If OP0 is a constant, just convert it into the proper mode. */
7163 if (CONSTANT_P (op0))
7164 return
7165 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7166 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7168 if (modifier == EXPAND_INITIALIZER)
7169 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7171 if (target == 0)
7172 return
7173 convert_to_mode (mode, op0,
7174 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7175 else
7176 convert_move (target, op0,
7177 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7178 return target;
7180 case PLUS_EXPR:
7181 /* We come here from MINUS_EXPR when the second operand is a
7182 constant. */
7183 plus_expr:
7184 this_optab = add_optab;
7186 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7187 something else, make sure we add the register to the constant and
7188 then to the other thing. This case can occur during strength
7189 reduction and doing it this way will produce better code if the
7190 frame pointer or argument pointer is eliminated.
7192 fold-const.c will ensure that the constant is always in the inner
7193 PLUS_EXPR, so the only case we need to do anything about is if
7194 sp, ap, or fp is our second argument, in which case we must swap
7195 the innermost first argument and our second argument. */
7197 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7198 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7199 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7200 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7201 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7202 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7204 tree t = TREE_OPERAND (exp, 1);
7206 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7207 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7210 /* If the result is to be ptr_mode and we are adding an integer to
7211 something, we might be forming a constant. So try to use
7212 plus_constant. If it produces a sum and we can't accept it,
7213 use force_operand. This allows P = &ARR[const] to generate
7214 efficient code on machines where a SYMBOL_REF is not a valid
7215 address.
7217 If this is an EXPAND_SUM call, always return the sum. */
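/* For illustration (assuming 4-byte array elements): expanding the
   address in `p = &arr[3]' for a static `arr' lets plus_constant fold
   the offset into the SYMBOL_REF, giving "arr" plus (const_int 12)
   rather than an explicit add insn; if that form is not a legitimate
   address and the caller did not ask for EXPAND_SUM, force_operand
   fixes it up below.  */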
7218 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7219 || mode == ptr_mode)
7221 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7222 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7223 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7225 rtx constant_part;
7227 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7228 EXPAND_SUM);
7229 /* Use immed_double_const to ensure that the constant is
7230 truncated according to the mode of OP1, then sign extended
7231 to a HOST_WIDE_INT. Using the constant directly can result
7232 in non-canonical RTL in a 64x32 cross compile. */
7233 constant_part
7234 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7235 (HOST_WIDE_INT) 0,
7236 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7237 op1 = plus_constant (op1, INTVAL (constant_part));
7238 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7239 op1 = force_operand (op1, target);
7240 return op1;
7243 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7244 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7245 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7247 rtx constant_part;
7249 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7250 EXPAND_SUM);
7251 if (! CONSTANT_P (op0))
7253 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7254 VOIDmode, modifier);
7255 /* Don't go to both_summands if modifier
7256 says it's not right to return a PLUS. */
7257 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7258 goto binop2;
7259 goto both_summands;
7261 /* Use immed_double_const to ensure that the constant is
7262 truncated according to the mode of OP1, then sign extended
7263 to a HOST_WIDE_INT. Using the constant directly can result
7264 in non-canonical RTL in a 64x32 cross compile. */
7265 constant_part
7266 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7267 (HOST_WIDE_INT) 0,
7268 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7269 op0 = plus_constant (op0, INTVAL (constant_part));
7270 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7271 op0 = force_operand (op0, target);
7272 return op0;
7276 /* No sense saving up arithmetic to be done
7277 if it's all in the wrong mode to form part of an address.
7278 And force_operand won't know whether to sign-extend or
7279 zero-extend. */
7280 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7281 || mode != ptr_mode)
7282 goto binop;
7284 preexpand_calls (exp);
7285 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7286 subtarget = 0;
7288 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7289 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7291 both_summands:
7292 /* Make sure any term that's a sum with a constant comes last. */
7293 if (GET_CODE (op0) == PLUS
7294 && CONSTANT_P (XEXP (op0, 1)))
7296 temp = op0;
7297 op0 = op1;
7298 op1 = temp;
7300 /* If adding to a sum including a constant,
7301 associate it to put the constant outside. */
7302 if (GET_CODE (op1) == PLUS
7303 && CONSTANT_P (XEXP (op1, 1)))
7305 rtx constant_term = const0_rtx;
7307 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7308 if (temp != 0)
7309 op0 = temp;
7310 /* Ensure that MULT comes first if there is one. */
7311 else if (GET_CODE (op0) == MULT)
7312 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7313 else
7314 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7316 /* Let's also eliminate constants from op0 if possible. */
7317 op0 = eliminate_constant_term (op0, &constant_term);
7319 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7320 their sum should be a constant. Form it into OP1, since the
7321 result we want will then be OP0 + OP1. */
7323 temp = simplify_binary_operation (PLUS, mode, constant_term,
7324 XEXP (op1, 1));
7325 if (temp != 0)
7326 op1 = temp;
7327 else
7328 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7331 /* Put a constant term last and put a multiplication first. */
7332 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7333 temp = op1, op1 = op0, op0 = temp;
7335 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7336 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
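/* Illustrative sketch, not part of this file; the concrete shapes assume a
   target with 4-byte int.  The reassociation above is what keeps constant
   terms outermost when a sum is being built for use inside an address.  A
   hypothetical fragment such as

     int g (int *p, int i) { return *(p + i + 3); }

   expanded with EXPAND_SUM aims to end up roughly as
   (plus (plus (mult (reg I) (const_int 4)) (reg P)) (const_int 12)),
   with any MULT from the index scaling placed first and the constant term
   last, so it can be folded into the final memory address.  */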
7338 case MINUS_EXPR:
7339 /* For initializers, we are allowed to return a MINUS of two
7340 symbolic constants. Here we handle all cases when both operands
7341 are constant. */
7342 /* Handle difference of two symbolic constants,
7343 for the sake of an initializer. */
7344 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7345 && really_constant_p (TREE_OPERAND (exp, 0))
7346 && really_constant_p (TREE_OPERAND (exp, 1)))
7348 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7349 VOIDmode, ro_modifier);
7350 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7351 VOIDmode, ro_modifier);
7353 /* If the last operand is a CONST_INT, use plus_constant of
7354 the negated constant. Else make the MINUS. */
7355 if (GET_CODE (op1) == CONST_INT)
7356 return plus_constant (op0, - INTVAL (op1));
7357 else
7358 return gen_rtx_MINUS (mode, op0, op1);
7360 /* Convert A - const to A + (-const). */
7361 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7363 tree negated = fold (build1 (NEGATE_EXPR, type,
7364 TREE_OPERAND (exp, 1)));
7366 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7367 /* If we can't negate the constant in TYPE, leave it alone and
7368 expand_binop will negate it for us. We used to try to do it
7369 here in the signed version of TYPE, but that doesn't work
7370 on POINTER_TYPEs. */;
7371 else
7373 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7374 goto plus_expr;
7377 this_optab = sub_optab;
7378 goto binop;
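/* Illustrative sketch, not part of this file: the rewrite above turns a
   subtraction of a constant into an addition of its negation, e.g.

     int f (int a) { return a - 5; }

   becomes the equivalent of `a + (-5)' and re-enters the PLUS_EXPR logic,
   where it can take part in address arithmetic.  The TREE_UNSIGNED and
   TREE_OVERFLOW guards keep constants that cannot safely be negated in
   TYPE on the ordinary sub_optab path instead.  */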
7380 case MULT_EXPR:
7381 preexpand_calls (exp);
7382 /* If first operand is constant, swap them.
7383 Thus the following special case checks need only
7384 check the second operand. */
7385 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7387 register tree t1 = TREE_OPERAND (exp, 0);
7388 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7389 TREE_OPERAND (exp, 1) = t1;
7392 /* Attempt to return something suitable for generating an
7393 indexed address, for machines that support that. */
7395 if (modifier == EXPAND_SUM && mode == ptr_mode
7396 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7397 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7399 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7400 EXPAND_SUM);
7402 /* Apply distributive law if OP0 is x+c. */
7403 if (GET_CODE (op0) == PLUS
7404 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7405 return
7406 gen_rtx_PLUS
7407 (mode,
7408 gen_rtx_MULT
7409 (mode, XEXP (op0, 0),
7410 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7411 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7412 * INTVAL (XEXP (op0, 1))));
7414 if (GET_CODE (op0) != REG)
7415 op0 = force_operand (op0, NULL_RTX);
7416 if (GET_CODE (op0) != REG)
7417 op0 = copy_to_mode_reg (mode, op0);
7419 return
7420 gen_rtx_MULT (mode, op0,
7421 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7424 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7425 subtarget = 0;
7427 /* Check for multiplying things that have been extended
7428 from a narrower type. If this machine supports multiplying
7429 in that narrower type with a result in the desired type,
7430 do it that way, and avoid the explicit type-conversion. */
7431 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7432 && TREE_CODE (type) == INTEGER_TYPE
7433 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7434 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7435 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7436 && int_fits_type_p (TREE_OPERAND (exp, 1),
7437 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7438 /* Don't use a widening multiply if a shift will do. */
7439 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7440 > HOST_BITS_PER_WIDE_INT)
7441 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7443 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7444 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7446 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7447 /* If both operands are extended, they must either both
7448 be zero-extended or both be sign-extended. */
7449 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7451 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7453 enum machine_mode innermode
7454 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7455 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7456 ? smul_widen_optab : umul_widen_optab);
7457 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7458 ? umul_widen_optab : smul_widen_optab);
7459 if (mode == GET_MODE_WIDER_MODE (innermode))
7461 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7463 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7464 NULL_RTX, VOIDmode, 0);
7465 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7466 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7467 VOIDmode, 0);
7468 else
7469 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7470 NULL_RTX, VOIDmode, 0);
7471 goto binop2;
7473 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7474 && innermode == word_mode)
7476 rtx htem;
7477 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7478 NULL_RTX, VOIDmode, 0);
7479 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7480 op1 = convert_modes (innermode, mode,
7481 expand_expr (TREE_OPERAND (exp, 1),
7482 NULL_RTX, VOIDmode, 0),
7483 unsignedp);
7484 else
7485 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7486 NULL_RTX, VOIDmode, 0);
7487 temp = expand_binop (mode, other_optab, op0, op1, target,
7488 unsignedp, OPTAB_LIB_WIDEN);
7489 htem = expand_mult_highpart_adjust (innermode,
7490 gen_highpart (innermode, temp),
7491 op0, op1,
7492 gen_highpart (innermode, temp),
7493 unsignedp);
7494 emit_move_insn (gen_highpart (innermode, temp), htem);
7495 return temp;
7499 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7500 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7501 return expand_mult (mode, op0, op1, target, unsignedp);
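/* Illustrative sketch, not part of this file; the pattern names are only an
   assumption about a typical 32-bit target.  The widening-multiply check
   above is aimed at source such as

     long long f (int a, int b) { return (long long) a * (long long) b; }

   where both operands are NOP_EXPR extensions from a narrower mode.  If the
   target provides a mulsidi3/umulsidi3-style handler for the widening optab,
   the multiply is issued in the narrow mode with a wide result; the
   other_optab branch uses the opposite-signedness widening multiply and then
   corrects the high part with expand_mult_highpart_adjust.  */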
7503 case TRUNC_DIV_EXPR:
7504 case FLOOR_DIV_EXPR:
7505 case CEIL_DIV_EXPR:
7506 case ROUND_DIV_EXPR:
7507 case EXACT_DIV_EXPR:
7508 preexpand_calls (exp);
7509 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7510 subtarget = 0;
7511 /* Possible optimization: compute the dividend with EXPAND_SUM
7512 then if the divisor is constant can optimize the case
7513 where some terms of the dividend have coeffs divisible by it. */
7514 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7515 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7516 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7518 case RDIV_EXPR:
7519 this_optab = flodiv_optab;
7520 goto binop;
7522 case TRUNC_MOD_EXPR:
7523 case FLOOR_MOD_EXPR:
7524 case CEIL_MOD_EXPR:
7525 case ROUND_MOD_EXPR:
7526 preexpand_calls (exp);
7527 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7528 subtarget = 0;
7529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7530 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7531 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
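/* Illustrative note, not part of this file: both groups of cases above
   funnel into expand_divmod and differ here only in the remainder flag and
   the tree code passed along, e.g.

     int q (int a, int b) { return a / b; }   ends in expand_divmod (0, ...)
     int r (int a, int b) { return a % b; }   ends in expand_divmod (1, ...)
*/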
7533 case FIX_ROUND_EXPR:
7534 case FIX_FLOOR_EXPR:
7535 case FIX_CEIL_EXPR:
7536 abort (); /* Not used for C. */
7538 case FIX_TRUNC_EXPR:
7539 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7540 if (target == 0)
7541 target = gen_reg_rtx (mode);
7542 expand_fix (target, op0, unsignedp);
7543 return target;
7545 case FLOAT_EXPR:
7546 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7547 if (target == 0)
7548 target = gen_reg_rtx (mode);
7549 /* expand_float can't figure out what to do if FROM has VOIDmode.
7550 So give it the correct mode. With -O, cse will optimize this. */
7551 if (GET_MODE (op0) == VOIDmode)
7552 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7553 op0);
7554 expand_float (target, op0,
7555 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7556 return target;
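/* Illustrative sketch, not part of this file: these two cases are the
   float-to-integer and integer-to-float conversions, e.g.

     int    trunc_it (double d) { return (int) d; }      uses expand_fix
     double float_it (int i)    { return (double) i; }   uses expand_float

   The copy_to_mode_reg above only matters when the operand expands to a
   mode-less constant; giving it the mode of the source type tells
   expand_float which integer width it is converting from.  */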
7558 case NEGATE_EXPR:
7559 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7560 temp = expand_unop (mode, neg_optab, op0, target, 0);
7561 if (temp == 0)
7562 abort ();
7563 return temp;
7565 case ABS_EXPR:
7566 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7568 /* Handle complex values specially. */
7569 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7570 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7571 return expand_complex_abs (mode, op0, target, unsignedp);
7573 /* Unsigned abs is simply the operand. Testing here means we don't
7574 risk generating incorrect code below. */
7575 if (TREE_UNSIGNED (type))
7576 return op0;
7578 return expand_abs (mode, op0, target,
7579 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7581 case MAX_EXPR:
7582 case MIN_EXPR:
7583 target = original_target;
7584 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7585 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7586 || GET_MODE (target) != mode
7587 || (GET_CODE (target) == REG
7588 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7589 target = gen_reg_rtx (mode);
7590 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7591 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7593 /* First try to do it with a special MIN or MAX instruction.
7594 If that does not win, use a conditional jump to select the proper
7595 value. */
7596 this_optab = (TREE_UNSIGNED (type)
7597 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7598 : (code == MIN_EXPR ? smin_optab : smax_optab));
7600 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7601 OPTAB_WIDEN);
7602 if (temp != 0)
7603 return temp;
7605 /* At this point, a MEM target is no longer useful; we will get better
7606 code without it. */
7608 if (GET_CODE (target) == MEM)
7609 target = gen_reg_rtx (mode);
7611 if (target != op0)
7612 emit_move_insn (target, op0);
7614 op0 = gen_label_rtx ();
7616 /* If this mode is an integer too wide to compare properly,
7617 compare word by word. Rely on cse to optimize constant cases. */
7618 if (GET_MODE_CLASS (mode) == MODE_INT
7619 && ! can_compare_p (GE, mode, ccp_jump))
7621 if (code == MAX_EXPR)
7622 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7623 target, op1, NULL_RTX, op0);
7624 else
7625 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7626 op1, target, NULL_RTX, op0);
7628 else
7630 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7631 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7632 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7633 op0);
7635 emit_move_insn (target, op1);
7636 emit_label (op0);
7637 return target;
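/* Illustrative sketch, not part of this file: for

     int min (int a, int b) { return a < b ? a : b; }

   the front end can hand us a MIN_EXPR.  If the target has a matching
   smin/umin pattern, the expand_binop call above uses it directly;
   otherwise the fallback just emitted amounts to

     target = a;  if (a <= b) goto done;  target = b;  done:

   with the word-by-word jump used for integers too wide to compare in a
   single insn.  */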
7639 case BIT_NOT_EXPR:
7640 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7641 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7642 if (temp == 0)
7643 abort ();
7644 return temp;
7646 case FFS_EXPR:
7647 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7648 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7649 if (temp == 0)
7650 abort ();
7651 return temp;
7653 /* ??? Can optimize bitwise operations with one arg constant.
7654 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7655 and (a bitwise1 b) bitwise2 b (etc)
7656 but that is probably not worthwhile. */
7658 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7659 boolean values when we want in all cases to compute both of them. In
7660 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7661 as actual zero-or-1 values and then bitwise anding. In cases where
7662 there cannot be any side effects, better code would be made by
7663 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7664 how to recognize those cases. */
7666 case TRUTH_AND_EXPR:
7667 case BIT_AND_EXPR:
7668 this_optab = and_optab;
7669 goto binop;
7671 case TRUTH_OR_EXPR:
7672 case BIT_IOR_EXPR:
7673 this_optab = ior_optab;
7674 goto binop;
7676 case TRUTH_XOR_EXPR:
7677 case BIT_XOR_EXPR:
7678 this_optab = xor_optab;
7679 goto binop;
7681 case LSHIFT_EXPR:
7682 case RSHIFT_EXPR:
7683 case LROTATE_EXPR:
7684 case RROTATE_EXPR:
7685 preexpand_calls (exp);
7686 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7687 subtarget = 0;
7688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7689 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7690 unsignedp);
7692 /* Could determine the answer when only additive constants differ. Also,
7693 the addition of one can be handled by changing the condition. */
7694 case LT_EXPR:
7695 case LE_EXPR:
7696 case GT_EXPR:
7697 case GE_EXPR:
7698 case EQ_EXPR:
7699 case NE_EXPR:
7700 case UNORDERED_EXPR:
7701 case ORDERED_EXPR:
7702 case UNLT_EXPR:
7703 case UNLE_EXPR:
7704 case UNGT_EXPR:
7705 case UNGE_EXPR:
7706 case UNEQ_EXPR:
7707 preexpand_calls (exp);
7708 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7709 if (temp != 0)
7710 return temp;
7712 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7713 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7714 && original_target
7715 && GET_CODE (original_target) == REG
7716 && (GET_MODE (original_target)
7717 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7719 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7720 VOIDmode, 0);
7722 if (temp != original_target)
7723 temp = copy_to_reg (temp);
7725 op1 = gen_label_rtx ();
7726 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7727 GET_MODE (temp), unsignedp, 0, op1);
7728 emit_move_insn (temp, const1_rtx);
7729 emit_label (op1);
7730 return temp;
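/* Illustrative sketch, not part of this file: the fallback just above
   covers e.g.

     int nonzero (int x) { return x != 0; }

   when do_store_flag cannot produce the result directly: X is loaded into
   the (register) target, a branch skips the following store when it is
   already zero, and otherwise the target is overwritten with 1.  */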
7733 /* If no set-flag instruction, must generate a conditional
7734 store into a temporary variable. Drop through
7735 and handle this like && and ||. */
7737 case TRUTH_ANDIF_EXPR:
7738 case TRUTH_ORIF_EXPR:
7739 if (! ignore
7740 && (target == 0 || ! safe_from_p (target, exp, 1)
7741 /* Make sure we don't have a hard reg (such as function's return
7742 value) live across basic blocks, if not optimizing. */
7743 || (!optimize && GET_CODE (target) == REG
7744 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7745 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7747 if (target)
7748 emit_clr_insn (target);
7750 op1 = gen_label_rtx ();
7751 jumpifnot (exp, op1);
7753 if (target)
7754 emit_0_to_1_insn (target);
7756 emit_label (op1);
7757 return ignore ? const0_rtx : target;
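/* Illustrative sketch, not part of this file: this is the short-circuit
   path, e.g.

     int both (int a, int b) { return a && b; }

   The target is cleared, jumpifnot evaluates the condition with the usual
   short-circuit control flow and branches past the store when it is false,
   and otherwise the target is set to 1.  */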
7759 case TRUTH_NOT_EXPR:
7760 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7761 /* The parser is careful to generate TRUTH_NOT_EXPR
7762 only with operands that are always zero or one. */
7763 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7764 target, 1, OPTAB_LIB_WIDEN);
7765 if (temp == 0)
7766 abort ();
7767 return temp;
7769 case COMPOUND_EXPR:
7770 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7771 emit_queue ();
7772 return expand_expr (TREE_OPERAND (exp, 1),
7773 (ignore ? const0_rtx : target),
7774 VOIDmode, 0);
7776 case COND_EXPR:
7777 /* If we would have a "singleton" (see below) were it not for a
7778 conversion in each arm, bring that conversion back out. */
7779 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7780 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7781 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7782 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7784 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7785 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7787 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7788 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7789 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7790 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7791 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7792 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7793 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7794 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7795 return expand_expr (build1 (NOP_EXPR, type,
7796 build (COND_EXPR, TREE_TYPE (true),
7797 TREE_OPERAND (exp, 0),
7798 true, false)),
7799 target, tmode, modifier);
7803 /* Note that COND_EXPRs whose type is a structure or union
7804 are required to be constructed to contain assignments of
7805 a temporary variable, so that we can evaluate them here
7806 for side effect only. If type is void, we must do likewise. */
7808 /* If an arm of the branch requires a cleanup,
7809 only that cleanup is performed. */
7811 tree singleton = 0;
7812 tree binary_op = 0, unary_op = 0;
7814 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7815 convert it to our mode, if necessary. */
7816 if (integer_onep (TREE_OPERAND (exp, 1))
7817 && integer_zerop (TREE_OPERAND (exp, 2))
7818 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7820 if (ignore)
7822 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7823 ro_modifier);
7824 return const0_rtx;
7827 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7828 if (GET_MODE (op0) == mode)
7829 return op0;
7831 if (target == 0)
7832 target = gen_reg_rtx (mode);
7833 convert_move (target, op0, unsignedp);
7834 return target;
7837 /* Check for X ? A + B : A. If we have this, we can copy A to the
7838 output and conditionally add B. Similarly for unary operations.
7839 Don't do this if X has side-effects because those side effects
7840 might affect A or B and the "?" operation is a sequence point in
7841 ANSI. (operand_equal_p tests for side effects.) */
7843 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7844 && operand_equal_p (TREE_OPERAND (exp, 2),
7845 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7846 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7847 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7848 && operand_equal_p (TREE_OPERAND (exp, 1),
7849 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7850 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7851 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7852 && operand_equal_p (TREE_OPERAND (exp, 2),
7853 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7854 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7855 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7856 && operand_equal_p (TREE_OPERAND (exp, 1),
7857 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7858 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7860 /* If we are not to produce a result, we have no target. Otherwise,
7861 if a target was specified use it; it will not be used as an
7862 intermediate target unless it is safe. If no target, use a
7863 temporary. */
7865 if (ignore)
7866 temp = 0;
7867 else if (original_target
7868 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7869 || (singleton && GET_CODE (original_target) == REG
7870 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7871 && original_target == var_rtx (singleton)))
7872 && GET_MODE (original_target) == mode
7873 #ifdef HAVE_conditional_move
7874 && (! can_conditionally_move_p (mode)
7875 || GET_CODE (original_target) == REG
7876 || TREE_ADDRESSABLE (type))
7877 #endif
7878 && ! (GET_CODE (original_target) == MEM
7879 && MEM_VOLATILE_P (original_target)))
7880 temp = original_target;
7881 else if (TREE_ADDRESSABLE (type))
7882 abort ();
7883 else
7884 temp = assign_temp (type, 0, 0, 1);
7886 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7887 do the test of X as a store-flag operation, do this as
7888 A + ((X != 0) << log C). Similarly for other simple binary
7889 operators. Only do for C == 1 if BRANCH_COST is low. */
7890 if (temp && singleton && binary_op
7891 && (TREE_CODE (binary_op) == PLUS_EXPR
7892 || TREE_CODE (binary_op) == MINUS_EXPR
7893 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7894 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7895 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7896 : integer_onep (TREE_OPERAND (binary_op, 1)))
7897 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7899 rtx result;
7900 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7901 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7902 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7903 : xor_optab);
7905 /* If we had X ? A : A + 1, do this as A + (X == 0).
7907 We have to invert the truth value here and then put it
7908 back later if do_store_flag fails. We cannot simply copy
7909 TREE_OPERAND (exp, 0) to another variable and modify that
7910 because invert_truthvalue can modify the tree pointed to
7911 by its argument. */
7912 if (singleton == TREE_OPERAND (exp, 1))
7913 TREE_OPERAND (exp, 0)
7914 = invert_truthvalue (TREE_OPERAND (exp, 0));
7916 result = do_store_flag (TREE_OPERAND (exp, 0),
7917 (safe_from_p (temp, singleton, 1)
7918 ? temp : NULL_RTX),
7919 mode, BRANCH_COST <= 1);
7921 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7922 result = expand_shift (LSHIFT_EXPR, mode, result,
7923 build_int_2 (tree_log2
7924 (TREE_OPERAND
7925 (binary_op, 1)),
7927 (safe_from_p (temp, singleton, 1)
7928 ? temp : NULL_RTX), 0);
7930 if (result)
7932 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7933 return expand_binop (mode, boptab, op1, result, temp,
7934 unsignedp, OPTAB_LIB_WIDEN);
7936 else if (singleton == TREE_OPERAND (exp, 1))
7937 TREE_OPERAND (exp, 0)
7938 = invert_truthvalue (TREE_OPERAND (exp, 0));
7941 do_pending_stack_adjust ();
7942 NO_DEFER_POP;
7943 op0 = gen_label_rtx ();
7945 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7947 if (temp != 0)
7949 /* If the target conflicts with the other operand of the
7950 binary op, we can't use it. Also, we can't use the target
7951 if it is a hard register, because evaluating the condition
7952 might clobber it. */
7953 if ((binary_op
7954 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7955 || (GET_CODE (temp) == REG
7956 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7957 temp = gen_reg_rtx (mode);
7958 store_expr (singleton, temp, 0);
7960 else
7961 expand_expr (singleton,
7962 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7963 if (singleton == TREE_OPERAND (exp, 1))
7964 jumpif (TREE_OPERAND (exp, 0), op0);
7965 else
7966 jumpifnot (TREE_OPERAND (exp, 0), op0);
7968 start_cleanup_deferral ();
7969 if (binary_op && temp == 0)
7970 /* Just touch the other operand. */
7971 expand_expr (TREE_OPERAND (binary_op, 1),
7972 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7973 else if (binary_op)
7974 store_expr (build (TREE_CODE (binary_op), type,
7975 make_tree (type, temp),
7976 TREE_OPERAND (binary_op, 1)),
7977 temp, 0);
7978 else
7979 store_expr (build1 (TREE_CODE (unary_op), type,
7980 make_tree (type, temp)),
7981 temp, 0);
7982 op1 = op0;
7984 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7985 comparison operator. If we have one of these cases, set the
7986 output to A, branch on A (cse will merge these two references),
7987 then set the output to FOO. */
7988 else if (temp
7989 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7990 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7991 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7992 TREE_OPERAND (exp, 1), 0)
7993 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7994 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7995 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7997 if (GET_CODE (temp) == REG
7998 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7999 temp = gen_reg_rtx (mode);
8000 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8001 jumpif (TREE_OPERAND (exp, 0), op0);
8003 start_cleanup_deferral ();
8004 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8005 op1 = op0;
8007 else if (temp
8008 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8009 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8010 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8011 TREE_OPERAND (exp, 2), 0)
8012 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8013 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8014 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8016 if (GET_CODE (temp) == REG
8017 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8018 temp = gen_reg_rtx (mode);
8019 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8020 jumpifnot (TREE_OPERAND (exp, 0), op0);
8022 start_cleanup_deferral ();
8023 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8024 op1 = op0;
8026 else
8028 op1 = gen_label_rtx ();
8029 jumpifnot (TREE_OPERAND (exp, 0), op0);
8031 start_cleanup_deferral ();
8033 /* One branch of the cond can be void, if it never returns. For
8034 example A ? throw : E */
8035 if (temp != 0
8036 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8037 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8038 else
8039 expand_expr (TREE_OPERAND (exp, 1),
8040 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8041 end_cleanup_deferral ();
8042 emit_queue ();
8043 emit_jump_insn (gen_jump (op1));
8044 emit_barrier ();
8045 emit_label (op0);
8046 start_cleanup_deferral ();
8047 if (temp != 0
8048 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8049 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8050 else
8051 expand_expr (TREE_OPERAND (exp, 2),
8052 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8055 end_cleanup_deferral ();
8057 emit_queue ();
8058 emit_label (op1);
8059 OK_DEFER_POP;
8061 return temp;
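/* Illustrative sketch, not part of this file: the "singleton" machinery
   above recognizes conditionals where one arm is the other arm plus a
   simple operation, e.g.

     int f (int x, int a) { return x ? a + 4 : a; }

   When `x != 0' can be computed as a store-flag result, this is emitted
   branch-free as the equivalent of `a + ((x != 0) << 2)'; 4 is a power of
   two, so the shift-by-log2 variant applies.  General conditionals fall
   through to the jump/label scheme at the end of the case.  */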
8064 case TARGET_EXPR:
8066 /* Something needs to be initialized, but we didn't know
8067 where that thing was when building the tree. For example,
8068 it could be the return value of a function, or a parameter
8069 to a function which is laid out on the stack, or a temporary
8070 variable which must be passed by reference.
8072 We guarantee that the expression will either be constructed
8073 or copied into our original target. */
8075 tree slot = TREE_OPERAND (exp, 0);
8076 tree cleanups = NULL_TREE;
8077 tree exp1;
8079 if (TREE_CODE (slot) != VAR_DECL)
8080 abort ();
8082 if (! ignore)
8083 target = original_target;
8085 /* Set this here so that if we get a target that refers to a
8086 register variable that's already been used, put_reg_into_stack
8087 knows that it should fix up those uses. */
8088 TREE_USED (slot) = 1;
8090 if (target == 0)
8092 if (DECL_RTL (slot) != 0)
8094 target = DECL_RTL (slot);
8095 /* If we have already expanded the slot, don't do
8096 it again. (mrs) */
8097 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8098 return target;
8100 else
8102 target = assign_temp (type, 2, 0, 1);
8103 /* All temp slots at this level must not conflict. */
8104 preserve_temp_slots (target);
8105 DECL_RTL (slot) = target;
8106 if (TREE_ADDRESSABLE (slot))
8108 TREE_ADDRESSABLE (slot) = 0;
8109 mark_addressable (slot);
8112 /* Since SLOT is not known to the called function
8113 to belong to its stack frame, we must build an explicit
8114 cleanup. This case occurs when we must build up a reference
8115 to pass the reference as an argument. In this case,
8116 it is very likely that such a reference need not be
8117 built here. */
8119 if (TREE_OPERAND (exp, 2) == 0)
8120 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8121 cleanups = TREE_OPERAND (exp, 2);
8124 else
8126 /* This case does occur when expanding a parameter which
8127 needs to be constructed on the stack. The target
8128 is the actual stack address that we want to initialize.
8129 The function we call will perform the cleanup in this case. */
8131 /* If we have already assigned it space, use that space,
8132 not target that we were passed in, as our target
8133 parameter is only a hint. */
8134 if (DECL_RTL (slot) != 0)
8136 target = DECL_RTL (slot);
8137 /* If we have already expanded the slot, don't do
8138 it again. (mrs) */
8139 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8140 return target;
8142 else
8144 DECL_RTL (slot) = target;
8145 /* If we must have an addressable slot, then make sure that
8146 the RTL that we just stored in slot is OK. */
8147 if (TREE_ADDRESSABLE (slot))
8149 TREE_ADDRESSABLE (slot) = 0;
8150 mark_addressable (slot);
8155 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8156 /* Mark it as expanded. */
8157 TREE_OPERAND (exp, 1) = NULL_TREE;
8159 store_expr (exp1, target, 0);
8161 expand_decl_cleanup (NULL_TREE, cleanups);
8163 return target;
8166 case INIT_EXPR:
8168 tree lhs = TREE_OPERAND (exp, 0);
8169 tree rhs = TREE_OPERAND (exp, 1);
8170 tree noncopied_parts = 0;
8171 tree lhs_type = TREE_TYPE (lhs);
8173 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8174 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8175 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8176 TYPE_NONCOPIED_PARTS (lhs_type));
8177 while (noncopied_parts != 0)
8179 expand_assignment (TREE_VALUE (noncopied_parts),
8180 TREE_PURPOSE (noncopied_parts), 0, 0);
8181 noncopied_parts = TREE_CHAIN (noncopied_parts);
8183 return temp;
8186 case MODIFY_EXPR:
8188 /* If lhs is complex, expand calls in rhs before computing it.
8189 That's so we don't compute a pointer and save it over a call.
8190 If lhs is simple, compute it first so we can give it as a
8191 target if the rhs is just a call. This avoids an extra temp and copy
8192 and that prevents a partial-subsumption which makes bad code.
8193 Actually we could treat component_ref's of vars like vars. */
8195 tree lhs = TREE_OPERAND (exp, 0);
8196 tree rhs = TREE_OPERAND (exp, 1);
8197 tree noncopied_parts = 0;
8198 tree lhs_type = TREE_TYPE (lhs);
8200 temp = 0;
8202 if (TREE_CODE (lhs) != VAR_DECL
8203 && TREE_CODE (lhs) != RESULT_DECL
8204 && TREE_CODE (lhs) != PARM_DECL
8205 && ! (TREE_CODE (lhs) == INDIRECT_REF
8206 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8207 preexpand_calls (exp);
8209 /* Check for |= or &= of a bitfield of size one into another bitfield
8210 of size 1. In this case, (unless we need the result of the
8211 assignment) we can do this more efficiently with a
8212 test followed by an assignment, if necessary.
8214 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8215 things change so we do, this code should be enhanced to
8216 support it. */
8217 if (ignore
8218 && TREE_CODE (lhs) == COMPONENT_REF
8219 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8220 || TREE_CODE (rhs) == BIT_AND_EXPR)
8221 && TREE_OPERAND (rhs, 0) == lhs
8222 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8223 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8224 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8226 rtx label = gen_label_rtx ();
8228 do_jump (TREE_OPERAND (rhs, 1),
8229 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8230 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8231 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8232 (TREE_CODE (rhs) == BIT_IOR_EXPR
8233 ? integer_one_node
8234 : integer_zero_node)),
8235 0, 0);
8236 do_pending_stack_adjust ();
8237 emit_label (label);
8238 return const0_rtx;
8241 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8242 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8243 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8244 TYPE_NONCOPIED_PARTS (lhs_type));
8246 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8247 while (noncopied_parts != 0)
8249 expand_assignment (TREE_PURPOSE (noncopied_parts),
8250 TREE_VALUE (noncopied_parts), 0, 0);
8251 noncopied_parts = TREE_CHAIN (noncopied_parts);
8253 return temp;
8256 case RETURN_EXPR:
8257 if (!TREE_OPERAND (exp, 0))
8258 expand_null_return ();
8259 else
8260 expand_return (TREE_OPERAND (exp, 0));
8261 return const0_rtx;
8263 case PREINCREMENT_EXPR:
8264 case PREDECREMENT_EXPR:
8265 return expand_increment (exp, 0, ignore);
8267 case POSTINCREMENT_EXPR:
8268 case POSTDECREMENT_EXPR:
8269 /* Faster to treat as pre-increment if result is not used. */
8270 return expand_increment (exp, ! ignore, ignore);
8272 case ADDR_EXPR:
8273 /* If nonzero, TEMP will be set to the address of something that might
8274 be a MEM corresponding to a stack slot. */
8275 temp = 0;
8277 /* Are we taking the address of a nested function? */
8278 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8279 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8280 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8281 && ! TREE_STATIC (exp))
8283 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8284 op0 = force_operand (op0, target);
8286 /* If we are taking the address of something erroneous, just
8287 return a zero. */
8288 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8289 return const0_rtx;
8290 else
8292 /* We make sure to pass const0_rtx down if we came in with
8293 ignore set, to avoid doing the cleanups twice for something. */
8294 op0 = expand_expr (TREE_OPERAND (exp, 0),
8295 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8296 (modifier == EXPAND_INITIALIZER
8297 ? modifier : EXPAND_CONST_ADDRESS));
8299 /* If we are going to ignore the result, OP0 will have been set
8300 to const0_rtx, so just return it. Don't get confused and
8301 think we are taking the address of the constant. */
8302 if (ignore)
8303 return op0;
8305 op0 = protect_from_queue (op0, 0);
8307 /* We would like the object in memory. If it is a constant, we can
8308 have it be statically allocated into memory. For a non-constant,
8309 we need to allocate some memory and store the value into it. */
8311 if (CONSTANT_P (op0))
8312 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8313 op0);
8314 else if (GET_CODE (op0) == MEM)
8316 mark_temp_addr_taken (op0);
8317 temp = XEXP (op0, 0);
8320 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8321 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8323 /* If this object is in a register, it must not
8324 be BLKmode. */
8325 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8326 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8328 mark_temp_addr_taken (memloc);
8329 emit_move_insn (memloc, op0);
8330 op0 = memloc;
8333 if (GET_CODE (op0) != MEM)
8334 abort ();
8336 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8338 temp = XEXP (op0, 0);
8339 #ifdef POINTERS_EXTEND_UNSIGNED
8340 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8341 && mode == ptr_mode)
8342 temp = convert_memory_address (ptr_mode, temp);
8343 #endif
8344 return temp;
8347 op0 = force_operand (XEXP (op0, 0), target);
8350 if (flag_force_addr && GET_CODE (op0) != REG)
8351 op0 = force_reg (Pmode, op0);
8353 if (GET_CODE (op0) == REG
8354 && ! REG_USERVAR_P (op0))
8355 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8357 /* If we might have had a temp slot, add an equivalent address
8358 for it. */
8359 if (temp != 0)
8360 update_temp_slot_address (temp, op0);
8362 #ifdef POINTERS_EXTEND_UNSIGNED
8363 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8364 && mode == ptr_mode)
8365 op0 = convert_memory_address (ptr_mode, op0);
8366 #endif
8368 return op0;
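/* Illustrative sketch, not part of this file: for

     static int x;
     int *addr_of_x (void) { return &x; }

   the operand expands to a MEM and its address is what gets returned.
   Taking the address of something that expanded to a REG, SUBREG, CONCAT
   or ADDRESSOF instead forces the value into a stack temporary first, as
   handled above, since only memory has an address to hand back.  */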
8370 case ENTRY_VALUE_EXPR:
8371 abort ();
8373 /* COMPLEX type for Extended Pascal & Fortran */
8374 case COMPLEX_EXPR:
8376 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8377 rtx insns;
8379 /* Get the rtx code of the operands. */
8380 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8381 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8383 if (! target)
8384 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8386 start_sequence ();
8388 /* Move the real (op0) and imaginary (op1) parts to their location. */
8389 emit_move_insn (gen_realpart (mode, target), op0);
8390 emit_move_insn (gen_imagpart (mode, target), op1);
8392 insns = get_insns ();
8393 end_sequence ();
8395 /* Complex construction should appear as a single unit. */
8396 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8397 each with a separate pseudo as destination.
8398 It's not correct for flow to treat them as a unit. */
8399 if (GET_CODE (target) != CONCAT)
8400 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8401 else
8402 emit_insns (insns);
8404 return target;
8407 case REALPART_EXPR:
8408 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8409 return gen_realpart (mode, op0);
8411 case IMAGPART_EXPR:
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8413 return gen_imagpart (mode, op0);
8415 case CONJ_EXPR:
8417 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8418 rtx imag_t;
8419 rtx insns;
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8423 if (! target)
8424 target = gen_reg_rtx (mode);
8426 start_sequence ();
8428 /* Store the realpart and the negated imagpart to target. */
8429 emit_move_insn (gen_realpart (partmode, target),
8430 gen_realpart (partmode, op0));
8432 imag_t = gen_imagpart (partmode, target);
8433 temp = expand_unop (partmode, neg_optab,
8434 gen_imagpart (partmode, op0), imag_t, 0);
8435 if (temp != imag_t)
8436 emit_move_insn (imag_t, temp);
8438 insns = get_insns ();
8439 end_sequence ();
8441 /* Conjugate should appear as a single unit
8442 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8443 each with a separate pseudo as destination.
8444 It's not correct for flow to treat them as a unit. */
8445 if (GET_CODE (target) != CONCAT)
8446 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8447 else
8448 emit_insns (insns);
8450 return target;
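/* Illustrative sketch, not part of this file; `~' as the complex conjugate
   is the GNU C spelling and is given only as an assumption about the front
   end.  These cases back the __complex__ support, e.g.

     __complex__ double conj_it (__complex__ double z) { return ~z; }

   The real and imaginary halves are moved (or, for CONJ_EXPR, negated)
   separately; when the target is not already a CONCAT the insns are wrapped
   with emit_no_conflict_block so the construction appears as a single unit,
   while a CONCAT target gets plain emit_insns as explained above.  */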
8453 case TRY_CATCH_EXPR:
8455 tree handler = TREE_OPERAND (exp, 1);
8457 expand_eh_region_start ();
8459 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8461 expand_eh_region_end (handler);
8463 return op0;
8466 case TRY_FINALLY_EXPR:
8468 tree try_block = TREE_OPERAND (exp, 0);
8469 tree finally_block = TREE_OPERAND (exp, 1);
8470 rtx finally_label = gen_label_rtx ();
8471 rtx done_label = gen_label_rtx ();
8472 rtx return_link = gen_reg_rtx (Pmode);
8473 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8474 (tree) finally_label, (tree) return_link);
8475 TREE_SIDE_EFFECTS (cleanup) = 1;
8477 /* Start a new binding layer that will keep track of all cleanup
8478 actions to be performed. */
8479 expand_start_bindings (2);
8481 target_temp_slot_level = temp_slot_level;
8483 expand_decl_cleanup (NULL_TREE, cleanup);
8484 op0 = expand_expr (try_block, target, tmode, modifier);
8486 preserve_temp_slots (op0);
8487 expand_end_bindings (NULL_TREE, 0, 0);
8488 emit_jump (done_label);
8489 emit_label (finally_label);
8490 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8491 emit_indirect_jump (return_link);
8492 emit_label (done_label);
8493 return op0;
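/* Illustrative sketch of the generated layout, not part of this file and
   stated only as an inference from the code above (pseudo-C, using
   computed-goto notation):

       <try block>
       return_link = &&after;  goto finally;  after:
       goto done;
     finally:
       <finally block>
       goto *return_link;
     done:

   The GOTO_SUBROUTINE_EXPR cleanup registered via expand_decl_cleanup
   reaches the finally code the same way on the non-fallthrough paths,
   loading return_link with the label to come back to.  */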
8496 case GOTO_SUBROUTINE_EXPR:
8498 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8499 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8500 rtx return_address = gen_label_rtx ();
8501 emit_move_insn (return_link,
8502 gen_rtx_LABEL_REF (Pmode, return_address));
8503 emit_jump (subr);
8504 emit_label (return_address);
8505 return const0_rtx;
8508 case POPDCC_EXPR:
8510 rtx dcc = get_dynamic_cleanup_chain ();
8511 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8512 return const0_rtx;
8515 case POPDHC_EXPR:
8517 rtx dhc = get_dynamic_handler_chain ();
8518 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8519 return const0_rtx;
8522 case VA_ARG_EXPR:
8523 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8525 default:
8526 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8529 /* Here to do an ordinary binary operator, generating an instruction
8530 from the optab already placed in `this_optab'. */
8531 binop:
8532 preexpand_calls (exp);
8533 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8534 subtarget = 0;
8535 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8536 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8537 binop2:
8538 temp = expand_binop (mode, this_optab, op0, op1, target,
8539 unsignedp, OPTAB_LIB_WIDEN);
8540 if (temp == 0)
8541 abort ();
8542 return temp;
8545 /* Similar to expand_expr, except that we don't specify a target, target
8546 mode, or modifier and we return the alignment of the inner type. This is
8547 used in cases where it is not necessary to align the result to the
8548 alignment of its type as long as we know the alignment of the result, for
8549 example for comparisons of BLKmode values. */
8551 static rtx
8552 expand_expr_unaligned (exp, palign)
8553 register tree exp;
8554 unsigned int *palign;
8556 register rtx op0;
8557 tree type = TREE_TYPE (exp);
8558 register enum machine_mode mode = TYPE_MODE (type);
8560 /* Default the alignment we return to that of the type. */
8561 *palign = TYPE_ALIGN (type);
8563 /* The only cases in which we do anything special is if the resulting mode
8564 is BLKmode. */
8565 if (mode != BLKmode)
8566 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8568 switch (TREE_CODE (exp))
8570 case CONVERT_EXPR:
8571 case NOP_EXPR:
8572 case NON_LVALUE_EXPR:
8573 /* Conversions between BLKmode values don't change the underlying
8574 alignment or value. */
8575 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8576 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8577 break;
8579 case ARRAY_REF:
8580 /* Much of the code for this case is copied directly from expand_expr.
8581 We need to duplicate it here because we will do something different
8582 in the fall-through case, so we need to handle the same exceptions
8583 it does. */
8585 tree array = TREE_OPERAND (exp, 0);
8586 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8587 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8588 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8589 HOST_WIDE_INT i;
8591 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8592 abort ();
8594 /* Optimize the special-case of a zero lower bound.
8596 We convert the low_bound to sizetype to avoid some problems
8597 with constant folding. (E.g. suppose the lower bound is 1,
8598 and its mode is QI. Without the conversion, (ARRAY
8599 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8600 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8602 if (! integer_zerop (low_bound))
8603 index = size_diffop (index, convert (sizetype, low_bound));
8605 /* If this is a constant index into a constant array,
8606 just get the value from the array. Handle both the cases when
8607 we have an explicit constructor and when our operand is a variable
8608 that was declared const. */
8610 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8611 && 0 > compare_tree_int (index,
8612 list_length (CONSTRUCTOR_ELTS
8613 (TREE_OPERAND (exp, 0)))))
8615 tree elem;
8617 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8618 i = TREE_INT_CST_LOW (index);
8619 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8622 if (elem)
8623 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8626 else if (optimize >= 1
8627 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8628 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8629 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8631 if (TREE_CODE (index) == INTEGER_CST)
8633 tree init = DECL_INITIAL (array);
8635 if (TREE_CODE (init) == CONSTRUCTOR)
8637 tree elem;
8639 for (elem = CONSTRUCTOR_ELTS (init);
8640 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8641 elem = TREE_CHAIN (elem))
8644 if (elem)
8645 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8646 palign);
8651 /* Fall through. */
8653 case COMPONENT_REF:
8654 case BIT_FIELD_REF:
8655 /* If the operand is a CONSTRUCTOR, we can just extract the
8656 appropriate field if it is present. Don't do this if we have
8657 already written the data since we want to refer to that copy
8658 and varasm.c assumes that's what we'll do. */
8659 if (TREE_CODE (exp) != ARRAY_REF
8660 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8661 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8663 tree elt;
8665 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8666 elt = TREE_CHAIN (elt))
8667 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8668 /* Note that unlike the case in expand_expr, we know this is
8669 BLKmode and hence not an integer. */
8670 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8674 enum machine_mode mode1;
8675 HOST_WIDE_INT bitsize, bitpos;
8676 tree offset;
8677 int volatilep = 0;
8678 unsigned int alignment;
8679 int unsignedp;
8680 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8681 &mode1, &unsignedp, &volatilep,
8682 &alignment);
8684 /* If we got back the original object, something is wrong. Perhaps
8685 we are evaluating an expression too early. In any event, don't
8686 infinitely recurse. */
8687 if (tem == exp)
8688 abort ();
8690 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8692 /* If this is a constant, put it into a register if it is a
8693 legitimate constant and OFFSET is 0 and memory if it isn't. */
8694 if (CONSTANT_P (op0))
8696 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8698 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8699 && offset == 0)
8700 op0 = force_reg (inner_mode, op0);
8701 else
8702 op0 = validize_mem (force_const_mem (inner_mode, op0));
8705 if (offset != 0)
8707 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8709 /* If this object is in a register, put it into memory.
8710 This case can't occur in C, but can in Ada if we have
8711 unchecked conversion of an expression from a scalar type to
8712 an array or record type. */
8713 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8714 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8716 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8718 mark_temp_addr_taken (memloc);
8719 emit_move_insn (memloc, op0);
8720 op0 = memloc;
8723 if (GET_CODE (op0) != MEM)
8724 abort ();
8726 if (GET_MODE (offset_rtx) != ptr_mode)
8728 #ifdef POINTERS_EXTEND_UNSIGNED
8729 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8730 #else
8731 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8732 #endif
8735 op0 = change_address (op0, VOIDmode,
8736 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8737 force_reg (ptr_mode,
8738 offset_rtx)));
8741 /* Don't forget about volatility even if this is a bitfield. */
8742 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8744 op0 = copy_rtx (op0);
8745 MEM_VOLATILE_P (op0) = 1;
8748 /* Check the access. */
8749 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8751 rtx to;
8752 int size;
8754 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8755 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8757 /* Check the access right of the pointer. */
8758 in_check_memory_usage = 1;
8759 if (size > BITS_PER_UNIT)
8760 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8761 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8762 TYPE_MODE (sizetype),
8763 GEN_INT (MEMORY_USE_RO),
8764 TYPE_MODE (integer_type_node));
8765 in_check_memory_usage = 0;
8768 /* In cases where an aligned union has an unaligned object
8769 as a field, we might be extracting a BLKmode value from
8770 an integer-mode (e.g., SImode) object. Handle this case
8771 by doing the extract into an object as wide as the field
8772 (which we know to be the width of a basic mode), then
8773 storing into memory, and changing the mode to BLKmode.
8774 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8775 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8776 if (mode1 == VOIDmode
8777 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8778 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8779 && (TYPE_ALIGN (type) > alignment
8780 || bitpos % TYPE_ALIGN (type) != 0)))
8782 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8784 if (ext_mode == BLKmode)
8786 /* In this case, BITPOS must start at a byte boundary. */
8787 if (GET_CODE (op0) != MEM
8788 || bitpos % BITS_PER_UNIT != 0)
8789 abort ();
8791 op0 = change_address (op0, VOIDmode,
8792 plus_constant (XEXP (op0, 0),
8793 bitpos / BITS_PER_UNIT));
8795 else
8797 rtx new = assign_stack_temp (ext_mode,
8798 bitsize / BITS_PER_UNIT, 0);
8800 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8801 unsignedp, NULL_RTX, ext_mode,
8802 ext_mode, alignment,
8803 int_size_in_bytes (TREE_TYPE (tem)));
8805 /* If the result is a record type and BITSIZE is narrower than
8806 the mode of OP0, an integral mode, and this is a big endian
8807 machine, we must put the field into the high-order bits. */
8808 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8809 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8810 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8811 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8812 size_int (GET_MODE_BITSIZE
8813 (GET_MODE (op0))
8814 - bitsize),
8815 op0, 1);
8817 emit_move_insn (new, op0);
8818 op0 = copy_rtx (new);
8819 PUT_MODE (op0, BLKmode);
8822 else
8823 /* Get a reference to just this component. */
8824 op0 = change_address (op0, mode1,
8825 plus_constant (XEXP (op0, 0),
8826 (bitpos / BITS_PER_UNIT)));
8828 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8830 /* Adjust the alignment in case the bit position is not
8831 a multiple of the alignment of the inner object. */
8832 while (bitpos % alignment != 0)
8833 alignment >>= 1;
8835 if (GET_CODE (XEXP (op0, 0)) == REG)
8836 mark_reg_pointer (XEXP (op0, 0), alignment);
8838 MEM_IN_STRUCT_P (op0) = 1;
8839 MEM_VOLATILE_P (op0) |= volatilep;
8841 *palign = alignment;
8842 return op0;
8845 default:
8846 break;
8850 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8853 /* Return the tree node if a ARG corresponds to a string constant or zero
8854 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8855 in bytes within the string that ARG is accessing. The type of the
8856 offset will be `sizetype'. */
8858 tree
8859 string_constant (arg, ptr_offset)
8860 tree arg;
8861 tree *ptr_offset;
8863 STRIP_NOPS (arg);
8865 if (TREE_CODE (arg) == ADDR_EXPR
8866 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8868 *ptr_offset = size_zero_node;
8869 return TREE_OPERAND (arg, 0);
8871 else if (TREE_CODE (arg) == PLUS_EXPR)
8873 tree arg0 = TREE_OPERAND (arg, 0);
8874 tree arg1 = TREE_OPERAND (arg, 1);
8876 STRIP_NOPS (arg0);
8877 STRIP_NOPS (arg1);
8879 if (TREE_CODE (arg0) == ADDR_EXPR
8880 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8882 *ptr_offset = convert (sizetype, arg1);
8883 return TREE_OPERAND (arg0, 0);
8885 else if (TREE_CODE (arg1) == ADDR_EXPR
8886 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8888 *ptr_offset = convert (sizetype, arg0);
8889 return TREE_OPERAND (arg1, 0);
8893 return 0;
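/* Illustrative sketch, not part of this file: string_constant lets callers
   (the string built-ins, for instance) see through expressions such as

     "hello" + 2

   i.e. a PLUS_EXPR whose operands are an ADDR_EXPR of a STRING_CST and an
   offset, in either order.  The STRING_CST is returned and the offset,
   converted to sizetype, is stored through PTR_OFFSET.  */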
8896 /* Expand code for a post- or pre- increment or decrement
8897 and return the RTX for the result.
8898 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8900 static rtx
8901 expand_increment (exp, post, ignore)
8902 register tree exp;
8903 int post, ignore;
8905 register rtx op0, op1;
8906 register rtx temp, value;
8907 register tree incremented = TREE_OPERAND (exp, 0);
8908 optab this_optab = add_optab;
8909 int icode;
8910 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8911 int op0_is_copy = 0;
8912 int single_insn = 0;
8913 /* 1 means we can't store into OP0 directly,
8914 because it is a subreg narrower than a word,
8915 and we don't dare clobber the rest of the word. */
8916 int bad_subreg = 0;
8918 /* Stabilize any component ref that might need to be
8919 evaluated more than once below. */
8920 if (!post
8921 || TREE_CODE (incremented) == BIT_FIELD_REF
8922 || (TREE_CODE (incremented) == COMPONENT_REF
8923 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8924 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8925 incremented = stabilize_reference (incremented);
8926 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8927 ones into save exprs so that they don't accidentally get evaluated
8928 more than once by the code below. */
8929 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8930 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8931 incremented = save_expr (incremented);
8933 /* Compute the operands as RTX.
8934 Note whether OP0 is the actual lvalue or a copy of it:
8935 I believe it is a copy iff it is a register or subreg
8936 and insns were generated in computing it. */
8938 temp = get_last_insn ();
8939 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8941 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8942 in place but instead must do sign- or zero-extension during assignment,
8943 so we copy it into a new register and let the code below use it as
8944 a copy.
8946 Note that we can safely modify this SUBREG since it is known not to be
8947 shared (it was made by the expand_expr call above). */
8949 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8951 if (post)
8952 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8953 else
8954 bad_subreg = 1;
8956 else if (GET_CODE (op0) == SUBREG
8957 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8959 /* We cannot increment this SUBREG in place. If we are
8960 post-incrementing, get a copy of the old value. Otherwise,
8961 just mark that we cannot increment in place. */
8962 if (post)
8963 op0 = copy_to_reg (op0);
8964 else
8965 bad_subreg = 1;
8968 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8969 && temp != get_last_insn ());
8970 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8971 EXPAND_MEMORY_USE_BAD);
8973 /* Decide whether incrementing or decrementing. */
8974 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8975 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8976 this_optab = sub_optab;
8978 /* Convert decrement by a constant into a negative increment. */
8979 if (this_optab == sub_optab
8980 && GET_CODE (op1) == CONST_INT)
8982 op1 = GEN_INT (-INTVAL (op1));
8983 this_optab = add_optab;
8986 /* For a preincrement, see if we can do this with a single instruction. */
8987 if (!post)
8989 icode = (int) this_optab->handlers[(int) mode].insn_code;
8990 if (icode != (int) CODE_FOR_nothing
8991 /* Make sure that OP0 is valid for operands 0 and 1
8992 of the insn we want to queue. */
8993 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8994 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8995 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8996 single_insn = 1;
8999 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9000 then we cannot just increment OP0. We must therefore contrive to
9001 increment the original value. Then, for postincrement, we can return
9002 OP0 since it is a copy of the old value. For preincrement, expand here
9003 unless we can do it with a single insn.
9005 Likewise if storing directly into OP0 would clobber high bits
9006 we need to preserve (bad_subreg). */
9007 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9009 /* This is the easiest way to increment the value wherever it is.
9010 Problems with multiple evaluation of INCREMENTED are prevented
9011 because either (1) it is a component_ref or preincrement,
9012 in which case it was stabilized above, or (2) it is an array_ref
9013 with constant index in an array in a register, which is
9014 safe to reevaluate. */
9015 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9016 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9017 ? MINUS_EXPR : PLUS_EXPR),
9018 TREE_TYPE (exp),
9019 incremented,
9020 TREE_OPERAND (exp, 1));
9022 while (TREE_CODE (incremented) == NOP_EXPR
9023 || TREE_CODE (incremented) == CONVERT_EXPR)
9025 newexp = convert (TREE_TYPE (incremented), newexp);
9026 incremented = TREE_OPERAND (incremented, 0);
9029 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9030 return post ? op0 : temp;
9033 if (post)
9035 /* We have a true reference to the value in OP0.
9036 If there is an insn to add or subtract in this mode, queue it.
9037 Queueing the increment insn avoids the register shuffling
9038 that often results if we must increment now and first save
9039 the old value for subsequent use. */
9041 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9042 op0 = stabilize (op0);
9043 #endif
9045 icode = (int) this_optab->handlers[(int) mode].insn_code;
9046 if (icode != (int) CODE_FOR_nothing
9047 /* Make sure that OP0 is valid for operands 0 and 1
9048 of the insn we want to queue. */
9049 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9050 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9052 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9053 op1 = force_reg (mode, op1);
9055 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9057 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9059 rtx addr = (general_operand (XEXP (op0, 0), mode)
9060 ? force_reg (Pmode, XEXP (op0, 0))
9061 : copy_to_reg (XEXP (op0, 0)));
9062 rtx temp, result;
9064 op0 = change_address (op0, VOIDmode, addr);
9065 temp = force_reg (GET_MODE (op0), op0);
9066 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9067 op1 = force_reg (mode, op1);
9069 /* The increment queue is LIFO, thus we have to `queue'
9070 the instructions in reverse order. */
9071 enqueue_insn (op0, gen_move_insn (op0, temp));
9072 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9073 return result;
9077 /* Preincrement, or we can't increment with one simple insn. */
9078 if (post)
9079 /* Save a copy of the value before inc or dec, to return it later. */
9080 temp = value = copy_to_reg (op0);
9081 else
9082 /* Arrange to return the incremented value. */
9083 /* Copy the rtx because expand_binop will protect from the queue,
9084 and the results of that would be invalid for us to return
9085 if our caller does emit_queue before using our result. */
9086 temp = copy_rtx (value = op0);
9088 /* Increment however we can. */
9089 op1 = expand_binop (mode, this_optab, value, op1,
9090 current_function_check_memory_usage ? NULL_RTX : op0,
9091 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9092 /* Make sure the value is stored into OP0. */
9093 if (op1 != op0)
9094 emit_move_insn (op0, op1);
9096 return temp;
9099 /* Expand all function calls contained within EXP, innermost ones first.
9100 But don't look within expressions that have sequence points.
9101 For each CALL_EXPR, record the rtx for its value
9102 in the CALL_EXPR_RTL field. */
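/* Source-level illustration only (functions are hypothetical): for an
   expression such as the return value below, the calls f (x) and g (y) are
   expanded first and their result rtx recorded in CALL_EXPR_RTL, so the
   surrounding arithmetic is expanded over already-computed values.  */
#if 0
extern int f (int), g (int);
int preexpand_sketch (int a, int x, int y)
{
  return a + f (x) * g (y);
}
#endif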
9104 static void
9105 preexpand_calls (exp)
9106 tree exp;
9108 register int nops, i;
9109 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9111 if (! do_preexpand_calls)
9112 return;
9114 /* Only expressions and references can contain calls. */
9116 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9117 return;
9119 switch (TREE_CODE (exp))
9121 case CALL_EXPR:
9122 /* Do nothing if already expanded. */
9123 if (CALL_EXPR_RTL (exp) != 0
9124 /* Do nothing if the call returns a variable-sized object. */
9125 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9126 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9127 /* Do nothing to built-in functions. */
9128 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9129 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9130 == FUNCTION_DECL)
9131 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9132 return;
9134 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9135 return;
9137 case COMPOUND_EXPR:
9138 case COND_EXPR:
9139 case TRUTH_ANDIF_EXPR:
9140 case TRUTH_ORIF_EXPR:
9141 /* If we find one of these, then we can be sure
9142 the adjust will be done for it (since it makes jumps).
9143 Do it now, so that if this is inside an argument
9144 of a function, we don't get the stack adjustment
9145 after some other args have already been pushed. */
9146 do_pending_stack_adjust ();
9147 return;
9149 case BLOCK:
9150 case RTL_EXPR:
9151 case WITH_CLEANUP_EXPR:
9152 case CLEANUP_POINT_EXPR:
9153 case TRY_CATCH_EXPR:
9154 return;
9156 case SAVE_EXPR:
9157 if (SAVE_EXPR_RTL (exp) != 0)
9158 return;
9160 default:
9161 break;
9164 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9165 for (i = 0; i < nops; i++)
9166 if (TREE_OPERAND (exp, i) != 0)
9168 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9169 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9170 It doesn't happen before the call is made. */
9172 else
9174 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9175 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9176 preexpand_calls (TREE_OPERAND (exp, i));
9181 /* At the start of a function, record that we have no previously-pushed
9182 arguments waiting to be popped. */
9184 void
9185 init_pending_stack_adjust ()
9187 pending_stack_adjust = 0;
9190 /* When exiting from a function, if safe, clear out any pending stack adjust
9191 so the adjustment won't get done.
9193 Note, if the current function calls alloca, then it must have a
9194 frame pointer regardless of the value of flag_omit_frame_pointer. */
9196 void
9197 clear_pending_stack_adjust ()
9199 #ifdef EXIT_IGNORE_STACK
9200 if (optimize > 0
9201 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9202 && EXIT_IGNORE_STACK
9203 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9204 && ! flag_inline_functions)
9206 stack_pointer_delta -= pending_stack_adjust,
9207 pending_stack_adjust = 0;
9209 #endif
9212 /* Pop any previously-pushed arguments that have not been popped yet. */
9214 void
9215 do_pending_stack_adjust ()
9217 if (inhibit_defer_pop == 0)
9219 if (pending_stack_adjust != 0)
9220 adjust_stack (GEN_INT (pending_stack_adjust));
9221 pending_stack_adjust = 0;
9225 /* Expand conditional expressions. */
9227 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9228 LABEL is an rtx of code CODE_LABEL, in this function and all the
9229 functions here. */
9231 void
9232 jumpifnot (exp, label)
9233 tree exp;
9234 rtx label;
9236 do_jump (exp, label, NULL_RTX);
9239 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9241 void
9242 jumpif (exp, label)
9243 tree exp;
9244 rtx label;
9246 do_jump (exp, NULL_RTX, label);
9249 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9250 the result is zero, or IF_TRUE_LABEL if the result is one.
9251 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9252 meaning fall through in that case.
9254 do_jump always does any pending stack adjust except when it does not
9255 actually perform a jump. An example where there is no jump
9256 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9258 This function is responsible for optimizing cases such as
9259 &&, || and comparison operators in EXP. */
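/* Illustrative sketch of the control flow produced for a short-circuit
   condition (labels and helper functions are hypothetical; no boolean value
   is materialized for the && itself).  */
#if 0
extern void then_part (void), else_part (void);
void do_jump_sketch (int a, int b)
{
  /* Expansion of `if (a && b) then_part (); else else_part ();'  */
  if (a == 0)
    goto if_false_label;
  if (b == 0)
    goto if_false_label;
  then_part ();
  goto after;
 if_false_label:
  else_part ();
 after:;
}
#endif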
9261 void
9262 do_jump (exp, if_false_label, if_true_label)
9263 tree exp;
9264 rtx if_false_label, if_true_label;
9266 register enum tree_code code = TREE_CODE (exp);
9267 /* Some cases need to create a label to jump to
9268 in order to properly fall through.
9269 These cases set DROP_THROUGH_LABEL nonzero. */
9270 rtx drop_through_label = 0;
9271 rtx temp;
9272 int i;
9273 tree type;
9274 enum machine_mode mode;
9276 #ifdef MAX_INTEGER_COMPUTATION_MODE
9277 check_max_integer_computation_mode (exp);
9278 #endif
9280 emit_queue ();
9282 switch (code)
9284 case ERROR_MARK:
9285 break;
9287 case INTEGER_CST:
9288 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9289 if (temp)
9290 emit_jump (temp);
9291 break;
9293 #if 0
9294 /* This is not true with #pragma weak */
9295 case ADDR_EXPR:
9296 /* The address of something can never be zero. */
9297 if (if_true_label)
9298 emit_jump (if_true_label);
9299 break;
9300 #endif
9302 case NOP_EXPR:
9303 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9304 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9305 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9306 goto normal;
9307 case CONVERT_EXPR:
9308 /* If we are narrowing the operand, we have to do the compare in the
9309 narrower mode. */
9310 if ((TYPE_PRECISION (TREE_TYPE (exp))
9311 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9312 goto normal;
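/* Illustration of the narrowing case above (hypothetical values):
   `if ((char) i)' must be tested in the narrow mode, since for i == 256 the
   truncated value is zero even though i itself is not.  */
#if 0
int narrow_sketch (int i)
{
  return (char) i != 0;	/* yields 0 for i == 256 */
}
#endif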
9313 case NON_LVALUE_EXPR:
9314 case REFERENCE_EXPR:
9315 case ABS_EXPR:
9316 case NEGATE_EXPR:
9317 case LROTATE_EXPR:
9318 case RROTATE_EXPR:
9319 /* These cannot change zero->non-zero or vice versa. */
9320 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9321 break;
9323 case WITH_RECORD_EXPR:
9324 /* Put the object on the placeholder list, recurse through our first
9325 operand, and pop the list. */
9326 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9327 placeholder_list);
9328 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9329 placeholder_list = TREE_CHAIN (placeholder_list);
9330 break;
9332 #if 0
9333 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9334 a test, and it can be longer if the test is eliminated. */
9335 case PLUS_EXPR:
9336 /* Reduce to minus. */
9337 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9338 TREE_OPERAND (exp, 0),
9339 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9340 TREE_OPERAND (exp, 1))));
9341 /* Process as MINUS. */
9342 #endif
9344 case MINUS_EXPR:
9345 /* Non-zero iff operands of minus differ. */
9346 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9347 TREE_OPERAND (exp, 0),
9348 TREE_OPERAND (exp, 1)),
9349 NE, NE, if_false_label, if_true_label);
9350 break;
9352 case BIT_AND_EXPR:
9353 /* If we are AND'ing with a small constant, do this comparison in the
9354 smallest type that fits. If the machine doesn't have comparisons
9355 that small, it will be converted back to the wider comparison.
9356 This helps if we are testing the sign bit of a narrower object.
9357 combine can't do this for us because it can't know whether a
9358 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9360 if (! SLOW_BYTE_ACCESS
9361 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9362 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9363 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9364 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9365 && (type = type_for_mode (mode, 1)) != 0
9366 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9367 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9368 != CODE_FOR_nothing))
9370 do_jump (convert (type, exp), if_false_label, if_true_label);
9371 break;
9373 goto normal;
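/* Illustration of the narrowing described above (hypothetical target with a
   byte-wide compare): a sign-bit test of the low byte of a wider value can be
   done entirely in QImode.  */
#if 0
extern void taken (void);
void bit_and_sketch (int x)
{
  /* `if (x & 0x80)' is done as a QImode test of the low byte.  */
  if ((unsigned char) (x & 0x80))
    taken ();
}
#endif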
9375 case TRUTH_NOT_EXPR:
9376 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9377 break;
9379 case TRUTH_ANDIF_EXPR:
9380 if (if_false_label == 0)
9381 if_false_label = drop_through_label = gen_label_rtx ();
9382 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9383 start_cleanup_deferral ();
9384 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9385 end_cleanup_deferral ();
9386 break;
9388 case TRUTH_ORIF_EXPR:
9389 if (if_true_label == 0)
9390 if_true_label = drop_through_label = gen_label_rtx ();
9391 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9392 start_cleanup_deferral ();
9393 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9394 end_cleanup_deferral ();
9395 break;
9397 case COMPOUND_EXPR:
9398 push_temp_slots ();
9399 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9400 preserve_temp_slots (NULL_RTX);
9401 free_temp_slots ();
9402 pop_temp_slots ();
9403 emit_queue ();
9404 do_pending_stack_adjust ();
9405 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9406 break;
9408 case COMPONENT_REF:
9409 case BIT_FIELD_REF:
9410 case ARRAY_REF:
9412 HOST_WIDE_INT bitsize, bitpos;
9413 int unsignedp;
9414 enum machine_mode mode;
9415 tree type;
9416 tree offset;
9417 int volatilep = 0;
9418 unsigned int alignment;
9420 /* Get description of this reference. We don't actually care
9421 about the underlying object here. */
9422 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9423 &unsignedp, &volatilep, &alignment);
9425 type = type_for_size (bitsize, unsignedp);
9426 if (! SLOW_BYTE_ACCESS
9427 && type != 0 && bitsize >= 0
9428 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9429 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9430 != CODE_FOR_nothing))
9432 do_jump (convert (type, exp), if_false_label, if_true_label);
9433 break;
9435 goto normal;
9438 case COND_EXPR:
9439 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9440 if (integer_onep (TREE_OPERAND (exp, 1))
9441 && integer_zerop (TREE_OPERAND (exp, 2)))
9442 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9444 else if (integer_zerop (TREE_OPERAND (exp, 1))
9445 && integer_onep (TREE_OPERAND (exp, 2)))
9446 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9448 else
9450 register rtx label1 = gen_label_rtx ();
9451 drop_through_label = gen_label_rtx ();
9453 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9455 start_cleanup_deferral ();
9456 /* Now the THEN-expression. */
9457 do_jump (TREE_OPERAND (exp, 1),
9458 if_false_label ? if_false_label : drop_through_label,
9459 if_true_label ? if_true_label : drop_through_label);
9460 /* In case the do_jump just above never jumps. */
9461 do_pending_stack_adjust ();
9462 emit_label (label1);
9464 /* Now the ELSE-expression. */
9465 do_jump (TREE_OPERAND (exp, 2),
9466 if_false_label ? if_false_label : drop_through_label,
9467 if_true_label ? if_true_label : drop_through_label);
9468 end_cleanup_deferral ();
9470 break;
9472 case EQ_EXPR:
9474 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9476 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9477 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9479 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9480 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9481 do_jump
9482 (fold
9483 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9484 fold (build (EQ_EXPR, TREE_TYPE (exp),
9485 fold (build1 (REALPART_EXPR,
9486 TREE_TYPE (inner_type),
9487 exp0)),
9488 fold (build1 (REALPART_EXPR,
9489 TREE_TYPE (inner_type),
9490 exp1)))),
9491 fold (build (EQ_EXPR, TREE_TYPE (exp),
9492 fold (build1 (IMAGPART_EXPR,
9493 TREE_TYPE (inner_type),
9494 exp0)),
9495 fold (build1 (IMAGPART_EXPR,
9496 TREE_TYPE (inner_type),
9497 exp1)))))),
9498 if_false_label, if_true_label);
9501 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9502 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9504 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9505 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9506 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9507 else
9508 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9509 break;
9512 case NE_EXPR:
9514 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9516 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9517 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9519 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9520 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9521 do_jump
9522 (fold
9523 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9524 fold (build (NE_EXPR, TREE_TYPE (exp),
9525 fold (build1 (REALPART_EXPR,
9526 TREE_TYPE (inner_type),
9527 exp0)),
9528 fold (build1 (REALPART_EXPR,
9529 TREE_TYPE (inner_type),
9530 exp1)))),
9531 fold (build (NE_EXPR, TREE_TYPE (exp),
9532 fold (build1 (IMAGPART_EXPR,
9533 TREE_TYPE (inner_type),
9534 exp0)),
9535 fold (build1 (IMAGPART_EXPR,
9536 TREE_TYPE (inner_type),
9537 exp1)))))),
9538 if_false_label, if_true_label);
9541 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9542 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9544 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9545 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9546 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9547 else
9548 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9549 break;
9552 case LT_EXPR:
9553 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9554 if (GET_MODE_CLASS (mode) == MODE_INT
9555 && ! can_compare_p (LT, mode, ccp_jump))
9556 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9557 else
9558 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9559 break;
9561 case LE_EXPR:
9562 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9563 if (GET_MODE_CLASS (mode) == MODE_INT
9564 && ! can_compare_p (LE, mode, ccp_jump))
9565 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9566 else
9567 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9568 break;
9570 case GT_EXPR:
9571 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9572 if (GET_MODE_CLASS (mode) == MODE_INT
9573 && ! can_compare_p (GT, mode, ccp_jump))
9574 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9575 else
9576 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9577 break;
9579 case GE_EXPR:
9580 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9581 if (GET_MODE_CLASS (mode) == MODE_INT
9582 && ! can_compare_p (GE, mode, ccp_jump))
9583 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9584 else
9585 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9586 break;
9588 case UNORDERED_EXPR:
9589 case ORDERED_EXPR:
9591 enum rtx_code cmp, rcmp;
9592 int do_rev;
9594 if (code == UNORDERED_EXPR)
9595 cmp = UNORDERED, rcmp = ORDERED;
9596 else
9597 cmp = ORDERED, rcmp = UNORDERED;
9598 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9600 do_rev = 0;
9601 if (! can_compare_p (cmp, mode, ccp_jump)
9602 && (can_compare_p (rcmp, mode, ccp_jump)
9603 /* If the target doesn't provide either UNORDERED or ORDERED
9604 comparisons, canonicalize on UNORDERED for the library. */
9605 || rcmp == UNORDERED))
9606 do_rev = 1;
9608 if (! do_rev)
9609 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9610 else
9611 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9613 break;
9616 enum rtx_code rcode1;
9617 enum tree_code tcode2;
9619 case UNLT_EXPR:
9620 rcode1 = UNLT;
9621 tcode2 = LT_EXPR;
9622 goto unordered_bcc;
9623 case UNLE_EXPR:
9624 rcode1 = UNLE;
9625 tcode2 = LE_EXPR;
9626 goto unordered_bcc;
9627 case UNGT_EXPR:
9628 rcode1 = UNGT;
9629 tcode2 = GT_EXPR;
9630 goto unordered_bcc;
9631 case UNGE_EXPR:
9632 rcode1 = UNGE;
9633 tcode2 = GE_EXPR;
9634 goto unordered_bcc;
9635 case UNEQ_EXPR:
9636 rcode1 = UNEQ;
9637 tcode2 = EQ_EXPR;
9638 goto unordered_bcc;
9640 unordered_bcc:
9641 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9642 if (can_compare_p (rcode1, mode, ccp_jump))
9643 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9644 if_true_label);
9645 else
9647 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9648 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9649 tree cmp0, cmp1;
9651 /* If the target doesn't support combined unordered
9652 compares, decompose into UNORDERED + comparison. */
9653 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9654 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9655 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9656 do_jump (exp, if_false_label, if_true_label);
9659 break;
9661 default:
9662 normal:
9663 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9664 #if 0
9665 /* This is not needed any more and causes poor code since it causes
9666 comparisons and tests from non-SI objects to have different code
9667 sequences. */
9668 /* Copy to register to avoid generating bad insns by cse
9669 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9670 if (!cse_not_expected && GET_CODE (temp) == MEM)
9671 temp = copy_to_reg (temp);
9672 #endif
9673 do_pending_stack_adjust ();
9674 /* Do any postincrements in the expression that was tested. */
9675 emit_queue ();
9677 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9679 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9680 if (target)
9681 emit_jump (target);
9683 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9684 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9685 /* Note swapping the labels gives us not-equal. */
9686 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9687 else if (GET_MODE (temp) != VOIDmode)
9688 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9689 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9690 GET_MODE (temp), NULL_RTX, 0,
9691 if_false_label, if_true_label);
9692 else
9693 abort ();
9696 if (drop_through_label)
9698 /* If do_jump produces code that might be jumped around,
9699 do any stack adjusts from that code, before the place
9700 where control merges in. */
9701 do_pending_stack_adjust ();
9702 emit_label (drop_through_label);
9706 /* Given a comparison expression EXP for values too wide to be compared
9707 with one insn, test the comparison and jump to the appropriate label.
9708 The code of EXP is ignored; we always test GT if SWAP is 0,
9709 and LT if SWAP is 1. */
9711 static void
9712 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9713 tree exp;
9714 int swap;
9715 rtx if_false_label, if_true_label;
9717 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9718 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9719 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9720 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9722 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9725 /* Compare OP0 with OP1, word at a time, in mode MODE.
9726 UNSIGNEDP says to do unsigned comparison.
9727 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9729 void
9730 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9731 enum machine_mode mode;
9732 int unsignedp;
9733 rtx op0, op1;
9734 rtx if_false_label, if_true_label;
9736 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9737 rtx drop_through_label = 0;
9738 int i;
9740 if (! if_true_label || ! if_false_label)
9741 drop_through_label = gen_label_rtx ();
9742 if (! if_true_label)
9743 if_true_label = drop_through_label;
9744 if (! if_false_label)
9745 if_false_label = drop_through_label;
9747 /* Compare a word at a time, high order first. */
9748 for (i = 0; i < nwords; i++)
9750 rtx op0_word, op1_word;
9752 if (WORDS_BIG_ENDIAN)
9754 op0_word = operand_subword_force (op0, i, mode);
9755 op1_word = operand_subword_force (op1, i, mode);
9757 else
9759 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9760 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9763 /* All but the high-order word must be compared as unsigned. */
9764 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9765 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9766 NULL_RTX, if_true_label);
9768 /* Consider lower words only if these are equal. */
9769 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9770 NULL_RTX, 0, NULL_RTX, if_false_label);
9773 if (if_false_label)
9774 emit_jump (if_false_label);
9775 if (drop_through_label)
9776 emit_label (drop_through_label);
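/* Word-at-a-time comparison, illustrated at the source level (hypothetical
   32-bit-word target, signed 64-bit operands; helper functions stand in for
   the true and false labels).  */
#if 0
extern void go_true (void), go_false (void);
void parts_greater_sketch (long long a, long long b)
{
  int a_hi = (int) (a >> 32), b_hi = (int) (b >> 32);
  unsigned int a_lo = (unsigned int) a, b_lo = (unsigned int) b;

  if (a_hi > b_hi)		/* high word: signed compare */
    { go_true (); return; }
  if (a_hi != b_hi)		/* high words differ, so a < b */
    { go_false (); return; }
  if (a_lo > b_lo)		/* low word: always unsigned */
    go_true ();
  else
    go_false ();
}
#endif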
9779 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9780 with one insn, test the comparison and jump to the appropriate label. */
9782 static void
9783 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9784 tree exp;
9785 rtx if_false_label, if_true_label;
9787 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9788 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9789 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9790 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9791 int i;
9792 rtx drop_through_label = 0;
9794 if (! if_false_label)
9795 drop_through_label = if_false_label = gen_label_rtx ();
9797 for (i = 0; i < nwords; i++)
9798 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9799 operand_subword_force (op1, i, mode),
9800 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9801 word_mode, NULL_RTX, 0, if_false_label,
9802 NULL_RTX);
9804 if (if_true_label)
9805 emit_jump (if_true_label);
9806 if (drop_through_label)
9807 emit_label (drop_through_label);
9810 /* Jump according to whether OP0 is 0.
9811 We assume that OP0 has an integer mode that is too wide
9812 for the available compare insns. */
9814 void
9815 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9816 rtx op0;
9817 rtx if_false_label, if_true_label;
9819 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9820 rtx part;
9821 int i;
9822 rtx drop_through_label = 0;
9824 /* The fastest way of doing this comparison on almost any machine is to
9825 "or" all the words and compare the result. If all have to be loaded
9826 from memory and this is a very wide item, it's possible this may
9827 be slower, but that's highly unlikely. */
9829 part = gen_reg_rtx (word_mode);
9830 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9831 for (i = 1; i < nwords && part != 0; i++)
9832 part = expand_binop (word_mode, ior_optab, part,
9833 operand_subword_force (op0, i, GET_MODE (op0)),
9834 part, 1, OPTAB_WIDEN);
9836 if (part != 0)
9838 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9839 NULL_RTX, 0, if_false_label, if_true_label);
9841 return;
9844 /* If we couldn't do the "or" simply, do this with a series of compares. */
9845 if (! if_false_label)
9846 drop_through_label = if_false_label = gen_label_rtx ();
9848 for (i = 0; i < nwords; i++)
9849 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9850 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9851 if_false_label, NULL_RTX);
9853 if (if_true_label)
9854 emit_jump (if_true_label);
9856 if (drop_through_label)
9857 emit_label (drop_through_label);
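/* The "or all the words" zero test above, illustrated at the source level
   (hypothetical 32-bit-word target, 64-bit operand).  */
#if 0
extern void is_zero (void), is_nonzero (void);
void parts_zero_sketch (unsigned long long x)
{
  unsigned int lo = (unsigned int) x;
  unsigned int hi = (unsigned int) (x >> 32);

  if ((lo | hi) == 0)		/* one OR, one compare against zero */
    is_zero ();
  else
    is_nonzero ();
}
#endif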
9860 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9861 (including code to compute the values to be compared)
9862 and set (CC0) according to the result.
9863 The decision as to signed or unsigned comparison must be made by the caller.
9865 We force a stack adjustment unless there are currently
9866 things pushed on the stack that aren't yet used.
9868 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9869 compared.
9871 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9872 size of MODE should be used. */
9874 rtx
9875 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9876 register rtx op0, op1;
9877 enum rtx_code code;
9878 int unsignedp;
9879 enum machine_mode mode;
9880 rtx size;
9881 unsigned int align;
9883 rtx tem;
9885 /* If one operand is constant, make it the second one. Only do this
9886 if the other operand is not constant as well. */
9888 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9889 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9891 tem = op0;
9892 op0 = op1;
9893 op1 = tem;
9894 code = swap_condition (code);
9897 if (flag_force_mem)
9899 op0 = force_not_mem (op0);
9900 op1 = force_not_mem (op1);
9903 do_pending_stack_adjust ();
9905 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9906 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9907 return tem;
9909 #if 0
9910 /* There's no need to do this now that combine.c can eliminate lots of
9911 sign extensions. This can be less efficient in certain cases on other
9912 machines. */
9914 /* If this is a signed equality comparison, we can do it as an
9915 unsigned comparison since zero-extension is cheaper than sign
9916 extension and comparisons with zero are done as unsigned. This is
9917 the case even on machines that can do fast sign extension, since
9918 zero-extension is easier to combine with other operations than
9919 sign-extension is. If we are comparing against a constant, we must
9920 convert it to what it would look like unsigned. */
9921 if ((code == EQ || code == NE) && ! unsignedp
9922 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9924 if (GET_CODE (op1) == CONST_INT
9925 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9926 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9927 unsignedp = 1;
9929 #endif
9931 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9933 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9936 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9937 The decision as to signed or unsigned comparison must be made by the caller.
9939 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9940 compared.
9942 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9943 size of MODE should be used. */
9945 void
9946 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9947 if_false_label, if_true_label)
9948 register rtx op0, op1;
9949 enum rtx_code code;
9950 int unsignedp;
9951 enum machine_mode mode;
9952 rtx size;
9953 unsigned int align;
9954 rtx if_false_label, if_true_label;
9956 rtx tem;
9957 int dummy_true_label = 0;
9959 /* Reverse the comparison if that is safe and we want to jump if it is
9960 false. */
9961 if (! if_true_label && ! FLOAT_MODE_P (mode))
9963 if_true_label = if_false_label;
9964 if_false_label = 0;
9965 code = reverse_condition (code);
9968 /* If one operand is constant, make it the second one. Only do this
9969 if the other operand is not constant as well. */
9971 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9972 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9974 tem = op0;
9975 op0 = op1;
9976 op1 = tem;
9977 code = swap_condition (code);
9980 if (flag_force_mem)
9982 op0 = force_not_mem (op0);
9983 op1 = force_not_mem (op1);
9986 do_pending_stack_adjust ();
9988 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9989 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9991 if (tem == const_true_rtx)
9993 if (if_true_label)
9994 emit_jump (if_true_label);
9996 else
9998 if (if_false_label)
9999 emit_jump (if_false_label);
10001 return;
10004 #if 0
10005 /* There's no need to do this now that combine.c can eliminate lots of
10006 sign extensions. This can be less efficient in certain cases on other
10007 machines. */
10009 /* If this is a signed equality comparison, we can do it as an
10010 unsigned comparison since zero-extension is cheaper than sign
10011 extension and comparisons with zero are done as unsigned. This is
10012 the case even on machines that can do fast sign extension, since
10013 zero-extension is easier to combine with other operations than
10014 sign-extension is. If we are comparing against a constant, we must
10015 convert it to what it would look like unsigned. */
10016 if ((code == EQ || code == NE) && ! unsignedp
10017 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10019 if (GET_CODE (op1) == CONST_INT
10020 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10021 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10022 unsignedp = 1;
10024 #endif
10026 if (! if_true_label)
10028 dummy_true_label = 1;
10029 if_true_label = gen_label_rtx ();
10032 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10033 if_true_label);
10035 if (if_false_label)
10036 emit_jump (if_false_label);
10037 if (dummy_true_label)
10038 emit_label (if_true_label);
10041 /* Generate code for a comparison expression EXP (including code to compute
10042 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10043 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10044 generated code will drop through.
10045 SIGNED_CODE should be the rtx operation for this comparison for
10046 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10048 We force a stack adjustment unless there are currently
10049 things pushed on the stack that aren't yet used. */
10051 static void
10052 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10053 if_true_label)
10054 register tree exp;
10055 enum rtx_code signed_code, unsigned_code;
10056 rtx if_false_label, if_true_label;
10058 unsigned int align0, align1;
10059 register rtx op0, op1;
10060 register tree type;
10061 register enum machine_mode mode;
10062 int unsignedp;
10063 enum rtx_code code;
10065 /* Don't crash if the comparison was erroneous. */
10066 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10067 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10068 return;
10070 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10071 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10072 mode = TYPE_MODE (type);
10073 unsignedp = TREE_UNSIGNED (type);
10074 code = unsignedp ? unsigned_code : signed_code;
10076 #ifdef HAVE_canonicalize_funcptr_for_compare
10077 /* If function pointers need to be "canonicalized" before they can
10078 be reliably compared, then canonicalize them. */
10079 if (HAVE_canonicalize_funcptr_for_compare
10080 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10081 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10082 == FUNCTION_TYPE))
10084 rtx new_op0 = gen_reg_rtx (mode);
10086 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10087 op0 = new_op0;
10090 if (HAVE_canonicalize_funcptr_for_compare
10091 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10092 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10093 == FUNCTION_TYPE))
10095 rtx new_op1 = gen_reg_rtx (mode);
10097 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10098 op1 = new_op1;
10100 #endif
10102 /* Do any postincrements in the expression that was tested. */
10103 emit_queue ();
10105 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10106 ((mode == BLKmode)
10107 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10108 MIN (align0, align1),
10109 if_false_label, if_true_label);
10112 /* Generate code to calculate EXP using a store-flag instruction
10113 and return an rtx for the result. EXP is either a comparison
10114 or a TRUTH_NOT_EXPR whose operand is a comparison.
10116 If TARGET is nonzero, store the result there if convenient.
10118 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10119 cheap.
10121 Return zero if there is no suitable set-flag instruction
10122 available on this machine.
10124 Once expand_expr has been called on the arguments of the comparison,
10125 we are committed to doing the store flag, since it is not safe to
10126 re-evaluate the expression. We emit the store-flag insn by calling
10127 emit_store_flag, but only expand the arguments if we have a reason
10128 to believe that emit_store_flag will be successful. If we think that
10129 it will, but it isn't, we have to simulate the store-flag with a
10130 set/jump/set sequence. */
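/* Illustrative sketch of the set/jump/set fallback mentioned above
   (hypothetical comparison; this is what the emitted sequence amounts to when
   emit_store_flag cannot be used).  */
#if 0
int store_flag_fallback_sketch (int a, int b)
{
  int target = 1;	/* assume the comparison holds */
  if (a < b)
    goto done;		/* conditional jump on the comparison */
  target = 0;		/* it did not hold; clear the flag */
 done:
  return target;
}
#endif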
10132 static rtx
10133 do_store_flag (exp, target, mode, only_cheap)
10134 tree exp;
10135 rtx target;
10136 enum machine_mode mode;
10137 int only_cheap;
10139 enum rtx_code code;
10140 tree arg0, arg1, type;
10141 tree tem;
10142 enum machine_mode operand_mode;
10143 int invert = 0;
10144 int unsignedp;
10145 rtx op0, op1;
10146 enum insn_code icode;
10147 rtx subtarget = target;
10148 rtx result, label;
10150 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10151 result at the end. We can't simply invert the test since it would
10152 have already been inverted if it were valid. This case occurs for
10153 some floating-point comparisons. */
10155 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10156 invert = 1, exp = TREE_OPERAND (exp, 0);
10158 arg0 = TREE_OPERAND (exp, 0);
10159 arg1 = TREE_OPERAND (exp, 1);
10160 type = TREE_TYPE (arg0);
10161 operand_mode = TYPE_MODE (type);
10162 unsignedp = TREE_UNSIGNED (type);
10164 /* We won't bother with BLKmode store-flag operations because it would mean
10165 passing a lot of information to emit_store_flag. */
10166 if (operand_mode == BLKmode)
10167 return 0;
10169 /* We won't bother with store-flag operations involving function pointers
10170 when function pointers must be canonicalized before comparisons. */
10171 #ifdef HAVE_canonicalize_funcptr_for_compare
10172 if (HAVE_canonicalize_funcptr_for_compare
10173 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10174 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10175 == FUNCTION_TYPE))
10176 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10177 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10178 == FUNCTION_TYPE))))
10179 return 0;
10180 #endif
10182 STRIP_NOPS (arg0);
10183 STRIP_NOPS (arg1);
10185 /* Get the rtx comparison code to use. We know that EXP is a comparison
10186 operation of some type. Some comparisons against 1 and -1 can be
10187 converted to comparisons with zero. Do so here so that the tests
10188 below will be aware that we have a comparison with zero. These
10189 tests will not catch constants in the first operand, but constants
10190 are rarely passed as the first operand. */
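/* For instance (signed operand, illustrative only): `x < 1' is treated as
   `x <= 0' and `x > -1' as `x >= 0', so the zero-based shortcuts further down
   apply.  */
#if 0
int lt_one_sketch (int x) { return x < 1; }	/* handled as x <= 0 */
int gt_m1_sketch (int x)  { return x > -1; }	/* handled as x >= 0 */
#endif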
10192 switch (TREE_CODE (exp))
10194 case EQ_EXPR:
10195 code = EQ;
10196 break;
10197 case NE_EXPR:
10198 code = NE;
10199 break;
10200 case LT_EXPR:
10201 if (integer_onep (arg1))
10202 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10203 else
10204 code = unsignedp ? LTU : LT;
10205 break;
10206 case LE_EXPR:
10207 if (! unsignedp && integer_all_onesp (arg1))
10208 arg1 = integer_zero_node, code = LT;
10209 else
10210 code = unsignedp ? LEU : LE;
10211 break;
10212 case GT_EXPR:
10213 if (! unsignedp && integer_all_onesp (arg1))
10214 arg1 = integer_zero_node, code = GE;
10215 else
10216 code = unsignedp ? GTU : GT;
10217 break;
10218 case GE_EXPR:
10219 if (integer_onep (arg1))
10220 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10221 else
10222 code = unsignedp ? GEU : GE;
10223 break;
10225 case UNORDERED_EXPR:
10226 code = UNORDERED;
10227 break;
10228 case ORDERED_EXPR:
10229 code = ORDERED;
10230 break;
10231 case UNLT_EXPR:
10232 code = UNLT;
10233 break;
10234 case UNLE_EXPR:
10235 code = UNLE;
10236 break;
10237 case UNGT_EXPR:
10238 code = UNGT;
10239 break;
10240 case UNGE_EXPR:
10241 code = UNGE;
10242 break;
10243 case UNEQ_EXPR:
10244 code = UNEQ;
10245 break;
10247 default:
10248 abort ();
10251 /* Put a constant second. */
10252 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10254 tem = arg0; arg0 = arg1; arg1 = tem;
10255 code = swap_condition (code);
10258 /* If this is an equality or inequality test of a single bit, we can
10259 do this by shifting the bit being tested to the low-order bit and
10260 masking the result with the constant 1. If the condition was EQ,
10261 we xor it with 1. This does not require an scc insn and is faster
10262 than an scc insn even if we have it. */
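/* Illustration (hypothetical bit position 3): `(x & 8) != 0' is done as
   `(x >> 3) & 1'; for the EQ form the result is then XORed with 1.  */
#if 0
int bit3_set_sketch (int x)   { return (x >> 3) & 1; }		/* (x & 8) != 0 */
int bit3_clear_sketch (int x) { return ((x >> 3) & 1) ^ 1; }	/* (x & 8) == 0 */
#endif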
10264 if ((code == NE || code == EQ)
10265 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10266 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10268 tree inner = TREE_OPERAND (arg0, 0);
10269 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10270 int ops_unsignedp;
10272 /* If INNER is a right shift by a constant and that constant plus BITNUM does
10273 not overflow, adjust BITNUM and INNER. */
10275 if (TREE_CODE (inner) == RSHIFT_EXPR
10276 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10277 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10278 && bitnum < TYPE_PRECISION (type)
10279 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10280 bitnum - TYPE_PRECISION (type)))
10282 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10283 inner = TREE_OPERAND (inner, 0);
10286 /* If we are going to be able to omit the AND below, we must do our
10287 operations as unsigned. If we must use the AND, we have a choice.
10288 Normally unsigned is faster, but for some machines signed is. */
10289 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10290 #ifdef LOAD_EXTEND_OP
10291 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10292 #else
10294 #endif
10297 if (! get_subtarget (subtarget)
10298 || GET_MODE (subtarget) != operand_mode
10299 || ! safe_from_p (subtarget, inner, 1))
10300 subtarget = 0;
10302 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10304 if (bitnum != 0)
10305 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10306 size_int (bitnum), subtarget, ops_unsignedp);
10308 if (GET_MODE (op0) != mode)
10309 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10311 if ((code == EQ && ! invert) || (code == NE && invert))
10312 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10313 ops_unsignedp, OPTAB_LIB_WIDEN);
10315 /* Put the AND last so it can combine with more things. */
10316 if (bitnum != TYPE_PRECISION (type) - 1)
10317 op0 = expand_and (op0, const1_rtx, subtarget);
10319 return op0;
10322 /* Now see if we are likely to be able to do this. Return if not. */
10323 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10324 return 0;
10326 icode = setcc_gen_code[(int) code];
10327 if (icode == CODE_FOR_nothing
10328 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10330 /* We can only do this if it is one of the special cases that
10331 can be handled without an scc insn. */
10332 if ((code == LT && integer_zerop (arg1))
10333 || (! only_cheap && code == GE && integer_zerop (arg1)))
10335 else if (BRANCH_COST >= 0
10336 && ! only_cheap && (code == NE || code == EQ)
10337 && TREE_CODE (type) != REAL_TYPE
10338 && ((abs_optab->handlers[(int) operand_mode].insn_code
10339 != CODE_FOR_nothing)
10340 || (ffs_optab->handlers[(int) operand_mode].insn_code
10341 != CODE_FOR_nothing)))
10343 else
10344 return 0;
10347 preexpand_calls (exp);
10348 if (! get_subtarget (target)
10349 || GET_MODE (subtarget) != operand_mode
10350 || ! safe_from_p (subtarget, arg1, 1))
10351 subtarget = 0;
10353 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10354 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10356 if (target == 0)
10357 target = gen_reg_rtx (mode);
10359 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10360 because, if emit_store_flag does anything, it will succeed and
10361 OP0 and OP1 will not be used subsequently. */
10363 result = emit_store_flag (target, code,
10364 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10365 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10366 operand_mode, unsignedp, 1);
10368 if (result)
10370 if (invert)
10371 result = expand_binop (mode, xor_optab, result, const1_rtx,
10372 result, 0, OPTAB_LIB_WIDEN);
10373 return result;
10376 /* If this failed, we have to do this with set/compare/jump/set code. */
10377 if (GET_CODE (target) != REG
10378 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10379 target = gen_reg_rtx (GET_MODE (target));
10381 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10382 result = compare_from_rtx (op0, op1, code, unsignedp,
10383 operand_mode, NULL_RTX, 0);
10384 if (GET_CODE (result) == CONST_INT)
10385 return (((result == const0_rtx && ! invert)
10386 || (result != const0_rtx && invert))
10387 ? const0_rtx : const1_rtx);
10389 label = gen_label_rtx ();
10390 if (bcc_gen_fctn[(int) code] == 0)
10391 abort ();
10393 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10394 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10395 emit_label (label);
10397 return target;
10400 /* Generate a tablejump instruction (used for switch statements). */
10402 #ifdef HAVE_tablejump
10404 /* INDEX is the value being switched on, with the lowest value
10405 in the table already subtracted.
10406 MODE is its expected mode (needed if INDEX is constant).
10407 RANGE is the length of the jump table.
10408 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10410 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10411 index value is out of range. */
10413 void
10414 do_tablejump (index, mode, range, table_label, default_label)
10415 rtx index, range, table_label, default_label;
10416 enum machine_mode mode;
10418 register rtx temp, vector;
10420 /* Do an unsigned comparison (in the proper mode) between the index
10421 expression and the value which represents the length of the range.
10422 Since we just finished subtracting the lower bound of the range
10423 from the index expression, this comparison allows us to simultaneously
10424 check that the original index expression value is both greater than
10425 or equal to the minimum value of the range and less than or equal to
10426 the maximum value of the range. */
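/* Illustration (hypothetical switch with case values 5 through 10): after
   the lower bound has been subtracted, one unsigned compare rejects values on
   both sides of the range, because values below the minimum wrap around to
   large unsigned numbers.  */
#if 0
int in_range_sketch (int x)
{
  unsigned int index = (unsigned int) x - 5;	/* lower bound already subtracted */
  return index <= 5;				/* true iff 5 <= x && x <= 10 */
}
#endif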
10428 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10429 0, default_label);
10431 /* If index is in range, it must fit in Pmode.
10432 Convert to Pmode so we can index with it. */
10433 if (mode != Pmode)
10434 index = convert_to_mode (Pmode, index, 1);
10436 /* Don't let a MEM slip through, because then the INDEX that comes
10437 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10438 and break_out_memory_refs will go to work on it and mess it up. */
10439 #ifdef PIC_CASE_VECTOR_ADDRESS
10440 if (flag_pic && GET_CODE (index) != REG)
10441 index = copy_to_mode_reg (Pmode, index);
10442 #endif
10444 /* If flag_force_addr were to affect this address
10445 it could interfere with the tricky assumptions made
10446 about addresses that contain label-refs,
10447 which may be valid only very near the tablejump itself. */
10448 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10449 GET_MODE_SIZE, because this indicates how large insns are. The other
10450 uses should all be Pmode, because they are addresses. This code
10451 could fail if addresses and insns are not the same size. */
10452 index = gen_rtx_PLUS (Pmode,
10453 gen_rtx_MULT (Pmode, index,
10454 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10455 gen_rtx_LABEL_REF (Pmode, table_label));
10456 #ifdef PIC_CASE_VECTOR_ADDRESS
10457 if (flag_pic)
10458 index = PIC_CASE_VECTOR_ADDRESS (index);
10459 else
10460 #endif
10461 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10462 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10463 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10464 RTX_UNCHANGING_P (vector) = 1;
10465 convert_move (temp, vector, 0);
10467 emit_jump_insn (gen_tablejump (temp, table_label));
10469 /* If we are generating PIC code or if the table is PC-relative, the
10470 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10471 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10472 emit_barrier ();
10475 #endif /* HAVE_tablejump */