gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
 90    codes that safe_from_p needs to know about.  Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
105 /* Nonzero to generate code for all the subroutines within an
106 expression before generating the upper levels of the expression.
107 Nowadays this is never zero. */
108 int do_preexpand_calls = 1;
 110 /* Don't check memory usage, since the code being emitted is itself checking
 111    memory usage.  Used when current_function_check_memory_usage is true, to avoid
112 infinite recursion. */
113 static int in_check_memory_usage;
115 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
116 static tree placeholder_list = 0;
118 /* This structure is used by move_by_pieces to describe the move to
119 be performed. */
120 struct move_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 rtx from;
127 rtx from_addr;
128 int autinc_from;
129 int explicit_inc_from;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 int reverse;
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
138 struct clear_by_pieces
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 unsigned HOST_WIDE_INT len;
145 HOST_WIDE_INT offset;
146 int reverse;
149 extern struct obstack permanent_obstack;
151 static rtx get_push_address PARAMS ((int));
153 static rtx enqueue_insn PARAMS ((rtx, rtx));
154 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
155 PARAMS ((unsigned HOST_WIDE_INT,
156 unsigned int));
157 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
158 struct move_by_pieces *));
159 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 unsigned int));
161 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
162 enum machine_mode,
163 struct clear_by_pieces *));
164 static rtx get_subtarget PARAMS ((rtx));
165 static int is_zeros_p PARAMS ((tree));
166 static int mostly_zeros_p PARAMS ((tree));
167 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, enum machine_mode,
169 tree, tree, unsigned int, int,
170 int));
171 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
172 HOST_WIDE_INT));
173 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
174 HOST_WIDE_INT, enum machine_mode,
175 tree, enum machine_mode, int,
176 unsigned int, HOST_WIDE_INT, int));
177 static enum memory_use_mode
178 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
179 static tree save_noncopied_parts PARAMS ((tree, tree));
180 static tree init_noncopied_parts PARAMS ((tree, tree));
181 static int fixed_type_p PARAMS ((tree));
182 static rtx var_rtx PARAMS ((tree));
183 static int readonly_fields_p PARAMS ((tree));
184 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
185 static rtx expand_increment PARAMS ((tree, int, int));
186 static void preexpand_calls PARAMS ((tree));
187 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
188 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
189 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
190 rtx, rtx));
191 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
193 /* Record for each mode whether we can move a register directly to or
194 from an object of that mode in memory. If we can't, we won't try
195 to use that mode directly when accessing a field of that mode. */
197 static char direct_load[NUM_MACHINE_MODES];
198 static char direct_store[NUM_MACHINE_MODES];
200 /* If a memory-to-memory move would take MOVE_RATIO or more simple
201 move-instruction sequences, we will do a movstr or libcall instead. */
203 #ifndef MOVE_RATIO
204 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
205 #define MOVE_RATIO 2
206 #else
207 /* If we are optimizing for space (-Os), cut down the default move ratio. */
208 #define MOVE_RATIO (optimize_size ? 3 : 15)
209 #endif
210 #endif
212 /* This macro is used to determine whether move_by_pieces should be called
213 to perform a structure copy. */
214 #ifndef MOVE_BY_PIECES_P
215 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
216 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
217 #endif
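/* For example, on a target with no movstr patterns and not optimizing for
   size, MOVE_RATIO above is 15; assuming 4-byte words, MOVE_MAX of 4, and
   word alignment, a 64-byte copy needs 16 single-word moves, so
   move_by_pieces_ninsns returns 16, MOVE_BY_PIECES_P is false, and the copy
   is done with a library call instead.  */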
219 /* This array records the insn_code of insns to perform block moves. */
220 enum insn_code movstr_optab[NUM_MACHINE_MODES];
222 /* This array records the insn_code of insns to perform block clears. */
223 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
225 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
227 #ifndef SLOW_UNALIGNED_ACCESS
228 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
229 #endif
231 /* This is run once per compilation to set up which modes can be used
232 directly in memory and to initialize the block move optab. */
234 void
235 init_expr_once ()
237 rtx insn, pat;
238 enum machine_mode mode;
239 int num_clobbers;
240 rtx mem, mem1;
241 char *free_point;
243 start_sequence ();
245 /* Since we are on the permanent obstack, we must be sure we save this
246 spot AFTER we call start_sequence, since it will reuse the rtl it
247 makes. */
248 free_point = (char *) oballoc (0);
250 /* Try indexing by frame ptr and try by stack ptr.
251 It is known that on the Convex the stack ptr isn't a valid index.
252 With luck, one or the other is valid on any machine. */
253 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
254 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
256 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
257 pat = PATTERN (insn);
259 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
260 mode = (enum machine_mode) ((int) mode + 1))
262 int regno;
263 rtx reg;
265 direct_load[(int) mode] = direct_store[(int) mode] = 0;
266 PUT_MODE (mem, mode);
267 PUT_MODE (mem1, mode);
269 /* See if there is some register that can be used in this mode and
270 directly loaded or stored from memory. */
272 if (mode != VOIDmode && mode != BLKmode)
273 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
274 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
275 regno++)
277 if (! HARD_REGNO_MODE_OK (regno, mode))
278 continue;
280 reg = gen_rtx_REG (mode, regno);
282 SET_SRC (pat) = mem;
283 SET_DEST (pat) = reg;
284 if (recog (pat, insn, &num_clobbers) >= 0)
285 direct_load[(int) mode] = 1;
287 SET_SRC (pat) = mem1;
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
292 SET_SRC (pat) = reg;
293 SET_DEST (pat) = mem;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_store[(int) mode] = 1;
297 SET_SRC (pat) = reg;
298 SET_DEST (pat) = mem1;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
304 end_sequence ();
305 obfree (free_point);
308 /* This is run at the start of compiling a function. */
310 void
311 init_expr ()
313 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
315 pending_chain = 0;
316 pending_stack_adjust = 0;
317 stack_pointer_delta = 0;
318 inhibit_defer_pop = 0;
319 saveregs_value = 0;
320 apply_args_value = 0;
321 forced_labels = 0;
324 void
325 mark_expr_status (p)
326 struct expr_status *p;
328 if (p == NULL)
329 return;
331 ggc_mark_rtx (p->x_saveregs_value);
332 ggc_mark_rtx (p->x_apply_args_value);
333 ggc_mark_rtx (p->x_forced_labels);
336 void
337 free_expr_status (f)
338 struct function *f;
340 free (f->expr);
341 f->expr = NULL;
344 /* Small sanity check that the queue is empty at the end of a function. */
346 void
347 finish_expr_for_function ()
349 if (pending_chain)
350 abort ();
353 /* Manage the queue of increment instructions to be output
354 for POSTINCREMENT_EXPR expressions, etc. */
356 /* Queue up to increment (or change) VAR later. BODY says how:
357 BODY should be the same thing you would pass to emit_insn
358 to increment right away. It will go to emit_insn later on.
360 The value is a QUEUED expression to be used in place of VAR
361 where you want to guarantee the pre-incrementation value of VAR. */
363 static rtx
364 enqueue_insn (var, body)
365 rtx var, body;
367 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
368 body, pending_chain);
369 return pending_chain;
372 /* Use protect_from_queue to convert a QUEUED expression
373 into something that you can put immediately into an instruction.
374 If the queued incrementation has not happened yet,
375 protect_from_queue returns the variable itself.
376 If the incrementation has happened, protect_from_queue returns a temp
377 that contains a copy of the old value of the variable.
379 Any time an rtx which might possibly be a QUEUED is to be put
380 into an instruction, it must be passed through protect_from_queue first.
381 QUEUED expressions are not meaningful in instructions.
383 Do not pass a value through protect_from_queue and then hold
384 on to it for a while before putting it in an instruction!
385 If the queue is flushed in between, incorrect code will result. */
388 protect_from_queue (x, modify)
389 register rtx x;
390 int modify;
392 register RTX_CODE code = GET_CODE (x);
394 #if 0 /* A QUEUED can hang around after the queue is forced out. */
395 /* Shortcut for most common case. */
396 if (pending_chain == 0)
397 return x;
398 #endif
400 if (code != QUEUED)
402 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
403 use of autoincrement. Make a copy of the contents of the memory
404 location rather than a copy of the address, but not if the value is
405 of mode BLKmode. Don't modify X in place since it might be
406 shared. */
407 if (code == MEM && GET_MODE (x) != BLKmode
408 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
410 register rtx y = XEXP (x, 0);
411 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
413 MEM_COPY_ATTRIBUTES (new, x);
415 if (QUEUED_INSN (y))
417 register rtx temp = gen_reg_rtx (GET_MODE (new));
418 emit_insn_before (gen_move_insn (temp, new),
419 QUEUED_INSN (y));
420 return temp;
422 return new;
424 /* Otherwise, recursively protect the subexpressions of all
425 the kinds of rtx's that can contain a QUEUED. */
426 if (code == MEM)
428 rtx tem = protect_from_queue (XEXP (x, 0), 0);
429 if (tem != XEXP (x, 0))
431 x = copy_rtx (x);
432 XEXP (x, 0) = tem;
435 else if (code == PLUS || code == MULT)
437 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
438 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
439 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
441 x = copy_rtx (x);
442 XEXP (x, 0) = new0;
443 XEXP (x, 1) = new1;
446 return x;
448 /* If the increment has not happened, use the variable itself. */
449 if (QUEUED_INSN (x) == 0)
450 return QUEUED_VAR (x);
451 /* If the increment has happened and a pre-increment copy exists,
452 use that copy. */
453 if (QUEUED_COPY (x) != 0)
454 return QUEUED_COPY (x);
455 /* The increment has happened but we haven't set up a pre-increment copy.
456 Set one up now, and use it. */
457 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
458 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
459 QUEUED_INSN (x));
460 return QUEUED_COPY (x);
463 /* Return nonzero if X contains a QUEUED expression:
464 if it contains anything that will be altered by a queued increment.
465 We handle only combinations of MEM, PLUS, MINUS and MULT operators
466 since memory addresses generally contain only those. */
469 queued_subexp_p (x)
470 rtx x;
472 register enum rtx_code code = GET_CODE (x);
473 switch (code)
475 case QUEUED:
476 return 1;
477 case MEM:
478 return queued_subexp_p (XEXP (x, 0));
479 case MULT:
480 case PLUS:
481 case MINUS:
482 return (queued_subexp_p (XEXP (x, 0))
483 || queued_subexp_p (XEXP (x, 1)));
484 default:
485 return 0;
489 /* Perform all the pending incrementations. */
491 void
492 emit_queue ()
494 register rtx p;
495 while ((p = pending_chain))
497 rtx body = QUEUED_BODY (p);
499 if (GET_CODE (body) == SEQUENCE)
501 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
502 emit_insn (QUEUED_BODY (p));
504 else
505 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
506 pending_chain = QUEUED_NEXT (p);
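/* Taken together, these routines implement deferred side effects for
   postincrement and similar expressions: enqueue_insn chains the increment
   onto pending_chain as a QUEUED rtx, protect_from_queue substitutes either
   the variable itself (if the increment has not been emitted yet) or a saved
   pre-increment copy, and emit_queue finally emits every queued body and
   records the emitted insn in QUEUED_INSN.  */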
510 /* Copy data from FROM to TO, where the machine modes are not the same.
511 Both modes may be integer, or both may be floating.
512 UNSIGNEDP should be nonzero if FROM is an unsigned type.
513 This causes zero-extension instead of sign-extension. */
515 void
516 convert_move (to, from, unsignedp)
517 register rtx to, from;
518 int unsignedp;
520 enum machine_mode to_mode = GET_MODE (to);
521 enum machine_mode from_mode = GET_MODE (from);
522 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
523 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
524 enum insn_code code;
525 rtx libcall;
527 /* rtx code for making an equivalent value. */
528 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
530 to = protect_from_queue (to, 1);
531 from = protect_from_queue (from, 0);
533 if (to_real != from_real)
534 abort ();
536 /* If FROM is a SUBREG that indicates that we have already done at least
537 the required extension, strip it. We don't handle such SUBREGs as
538 TO here. */
540 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
541 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
542 >= GET_MODE_SIZE (to_mode))
543 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
544 from = gen_lowpart (to_mode, from), from_mode = to_mode;
546 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
547 abort ();
549 if (to_mode == from_mode
550 || (from_mode == VOIDmode && CONSTANT_P (from)))
552 emit_move_insn (to, from);
553 return;
556 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
558 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
559 abort ();
561 if (VECTOR_MODE_P (to_mode))
562 from = gen_rtx_SUBREG (to_mode, from, 0);
563 else
564 to = gen_rtx_SUBREG (from_mode, to, 0);
566 emit_move_insn (to, from);
567 return;
570 if (to_real != from_real)
571 abort ();
573 if (to_real)
575 rtx value, insns;
577 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
579 /* Try converting directly if the insn is supported. */
580 if ((code = can_extend_p (to_mode, from_mode, 0))
581 != CODE_FOR_nothing)
583 emit_unop_insn (code, to, from, UNKNOWN);
584 return;
588 #ifdef HAVE_trunchfqf2
589 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
591 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
592 return;
594 #endif
595 #ifdef HAVE_trunctqfqf2
596 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
598 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
599 return;
601 #endif
602 #ifdef HAVE_truncsfqf2
603 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
605 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
606 return;
608 #endif
609 #ifdef HAVE_truncdfqf2
610 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
612 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
613 return;
615 #endif
616 #ifdef HAVE_truncxfqf2
617 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
619 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
620 return;
622 #endif
623 #ifdef HAVE_trunctfqf2
624 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
626 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
627 return;
629 #endif
631 #ifdef HAVE_trunctqfhf2
632 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
634 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
635 return;
637 #endif
638 #ifdef HAVE_truncsfhf2
639 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
641 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
642 return;
644 #endif
645 #ifdef HAVE_truncdfhf2
646 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
648 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
649 return;
651 #endif
652 #ifdef HAVE_truncxfhf2
653 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
655 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
656 return;
658 #endif
659 #ifdef HAVE_trunctfhf2
660 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
662 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
663 return;
665 #endif
667 #ifdef HAVE_truncsftqf2
668 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
670 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
671 return;
673 #endif
674 #ifdef HAVE_truncdftqf2
675 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
677 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
678 return;
680 #endif
681 #ifdef HAVE_truncxftqf2
682 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
684 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
685 return;
687 #endif
688 #ifdef HAVE_trunctftqf2
689 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
691 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
692 return;
694 #endif
696 #ifdef HAVE_truncdfsf2
697 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
699 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
700 return;
702 #endif
703 #ifdef HAVE_truncxfsf2
704 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
706 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
707 return;
709 #endif
710 #ifdef HAVE_trunctfsf2
711 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
713 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
714 return;
716 #endif
717 #ifdef HAVE_truncxfdf2
718 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
720 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
721 return;
723 #endif
724 #ifdef HAVE_trunctfdf2
725 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
727 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
728 return;
730 #endif
732 libcall = (rtx) 0;
733 switch (from_mode)
735 case SFmode:
736 switch (to_mode)
738 case DFmode:
739 libcall = extendsfdf2_libfunc;
740 break;
742 case XFmode:
743 libcall = extendsfxf2_libfunc;
744 break;
746 case TFmode:
747 libcall = extendsftf2_libfunc;
748 break;
750 default:
751 break;
753 break;
755 case DFmode:
756 switch (to_mode)
758 case SFmode:
759 libcall = truncdfsf2_libfunc;
760 break;
762 case XFmode:
763 libcall = extenddfxf2_libfunc;
764 break;
766 case TFmode:
767 libcall = extenddftf2_libfunc;
768 break;
770 default:
771 break;
773 break;
775 case XFmode:
776 switch (to_mode)
778 case SFmode:
779 libcall = truncxfsf2_libfunc;
780 break;
782 case DFmode:
783 libcall = truncxfdf2_libfunc;
784 break;
786 default:
787 break;
789 break;
791 case TFmode:
792 switch (to_mode)
794 case SFmode:
795 libcall = trunctfsf2_libfunc;
796 break;
798 case DFmode:
799 libcall = trunctfdf2_libfunc;
800 break;
802 default:
803 break;
805 break;
807 default:
808 break;
811 if (libcall == (rtx) 0)
812 /* This conversion is not implemented yet. */
813 abort ();
815 start_sequence ();
816 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
817 1, from, from_mode);
818 insns = get_insns ();
819 end_sequence ();
820 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
821 from));
822 return;
825 /* Now both modes are integers. */
827 /* Handle expanding beyond a word. */
828 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
829 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
831 rtx insns;
832 rtx lowpart;
833 rtx fill_value;
834 rtx lowfrom;
835 int i;
836 enum machine_mode lowpart_mode;
837 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
839 /* Try converting directly if the insn is supported. */
840 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
841 != CODE_FOR_nothing)
843 /* If FROM is a SUBREG, put it into a register. Do this
844 so that we always generate the same set of insns for
845 better cse'ing; if an intermediate assignment occurred,
846 we won't be doing the operation directly on the SUBREG. */
847 if (optimize > 0 && GET_CODE (from) == SUBREG)
848 from = force_reg (from_mode, from);
849 emit_unop_insn (code, to, from, equiv_code);
850 return;
852 /* Next, try converting via full word. */
853 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
854 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
855 != CODE_FOR_nothing))
857 if (GET_CODE (to) == REG)
858 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
859 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
860 emit_unop_insn (code, to,
861 gen_lowpart (word_mode, to), equiv_code);
862 return;
865 /* No special multiword conversion insn; do it by hand. */
866 start_sequence ();
868 /* Since we will turn this into a no conflict block, we must ensure
869 that the source does not overlap the target. */
871 if (reg_overlap_mentioned_p (to, from))
872 from = force_reg (from_mode, from);
874 /* Get a copy of FROM widened to a word, if necessary. */
875 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
876 lowpart_mode = word_mode;
877 else
878 lowpart_mode = from_mode;
880 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
882 lowpart = gen_lowpart (lowpart_mode, to);
883 emit_move_insn (lowpart, lowfrom);
885 /* Compute the value to put in each remaining word. */
886 if (unsignedp)
887 fill_value = const0_rtx;
888 else
890 #ifdef HAVE_slt
891 if (HAVE_slt
892 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
893 && STORE_FLAG_VALUE == -1)
895 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
896 lowpart_mode, 0, 0);
897 fill_value = gen_reg_rtx (word_mode);
898 emit_insn (gen_slt (fill_value));
900 else
901 #endif
903 fill_value
904 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
905 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906 NULL_RTX, 0);
907 fill_value = convert_to_mode (word_mode, fill_value, 1);
911 /* Fill the remaining words. */
912 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
914 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
915 rtx subword = operand_subword (to, index, 1, to_mode);
917 if (subword == 0)
918 abort ();
920 if (fill_value != subword)
921 emit_move_insn (subword, fill_value);
924 insns = get_insns ();
925 end_sequence ();
927 emit_no_conflict_block (insns, to, from, NULL_RTX,
928 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
929 return;
932 /* Truncating multi-word to a word or less. */
933 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
934 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
936 if (!((GET_CODE (from) == MEM
937 && ! MEM_VOLATILE_P (from)
938 && direct_load[(int) to_mode]
939 && ! mode_dependent_address_p (XEXP (from, 0)))
940 || GET_CODE (from) == REG
941 || GET_CODE (from) == SUBREG))
942 from = force_reg (from_mode, from);
943 convert_move (to, gen_lowpart (word_mode, from), 0);
944 return;
947 /* Handle pointer conversion. */ /* SPEE 900220. */
948 if (to_mode == PQImode)
950 if (from_mode != QImode)
951 from = convert_to_mode (QImode, from, unsignedp);
953 #ifdef HAVE_truncqipqi2
954 if (HAVE_truncqipqi2)
956 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
957 return;
959 #endif /* HAVE_truncqipqi2 */
960 abort ();
963 if (from_mode == PQImode)
965 if (to_mode != QImode)
967 from = convert_to_mode (QImode, from, unsignedp);
968 from_mode = QImode;
970 else
972 #ifdef HAVE_extendpqiqi2
973 if (HAVE_extendpqiqi2)
975 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
976 return;
978 #endif /* HAVE_extendpqiqi2 */
979 abort ();
983 if (to_mode == PSImode)
985 if (from_mode != SImode)
986 from = convert_to_mode (SImode, from, unsignedp);
988 #ifdef HAVE_truncsipsi2
989 if (HAVE_truncsipsi2)
991 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
992 return;
994 #endif /* HAVE_truncsipsi2 */
995 abort ();
998 if (from_mode == PSImode)
1000 if (to_mode != SImode)
1002 from = convert_to_mode (SImode, from, unsignedp);
1003 from_mode = SImode;
1005 else
1007 #ifdef HAVE_extendpsisi2
1008 if (! unsignedp && HAVE_extendpsisi2)
1010 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1011 return;
1013 #endif /* HAVE_extendpsisi2 */
1014 #ifdef HAVE_zero_extendpsisi2
1015 if (unsignedp && HAVE_zero_extendpsisi2)
1017 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1018 return;
1020 #endif /* HAVE_zero_extendpsisi2 */
1021 abort ();
1025 if (to_mode == PDImode)
1027 if (from_mode != DImode)
1028 from = convert_to_mode (DImode, from, unsignedp);
1030 #ifdef HAVE_truncdipdi2
1031 if (HAVE_truncdipdi2)
1033 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1034 return;
1036 #endif /* HAVE_truncdipdi2 */
1037 abort ();
1040 if (from_mode == PDImode)
1042 if (to_mode != DImode)
1044 from = convert_to_mode (DImode, from, unsignedp);
1045 from_mode = DImode;
1047 else
1049 #ifdef HAVE_extendpdidi2
1050 if (HAVE_extendpdidi2)
1052 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1053 return;
1055 #endif /* HAVE_extendpdidi2 */
1056 abort ();
1060 /* Now follow all the conversions between integers
1061 no more than a word long. */
1063 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1064 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1065 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1066 GET_MODE_BITSIZE (from_mode)))
1068 if (!((GET_CODE (from) == MEM
1069 && ! MEM_VOLATILE_P (from)
1070 && direct_load[(int) to_mode]
1071 && ! mode_dependent_address_p (XEXP (from, 0)))
1072 || GET_CODE (from) == REG
1073 || GET_CODE (from) == SUBREG))
1074 from = force_reg (from_mode, from);
1075 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1076 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1077 from = copy_to_reg (from);
1078 emit_move_insn (to, gen_lowpart (to_mode, from));
1079 return;
1082 /* Handle extension. */
1083 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1085 /* Convert directly if that works. */
1086 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1087 != CODE_FOR_nothing)
1089 emit_unop_insn (code, to, from, equiv_code);
1090 return;
1092 else
1094 enum machine_mode intermediate;
1095 rtx tmp;
1096 tree shift_amount;
1098 /* Search for a mode to convert via. */
1099 for (intermediate = from_mode; intermediate != VOIDmode;
1100 intermediate = GET_MODE_WIDER_MODE (intermediate))
1101 if (((can_extend_p (to_mode, intermediate, unsignedp)
1102 != CODE_FOR_nothing)
1103 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (intermediate))))
1106 && (can_extend_p (intermediate, from_mode, unsignedp)
1107 != CODE_FOR_nothing))
1109 convert_move (to, convert_to_mode (intermediate, from,
1110 unsignedp), unsignedp);
1111 return;
1114 /* No suitable intermediate mode.
1115 Generate what we need with shifts. */
1116 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1117 - GET_MODE_BITSIZE (from_mode), 0);
1118 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1119 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1120 to, unsignedp);
1121 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1122 to, unsignedp);
1123 if (tmp != to)
1124 emit_move_insn (to, tmp);
1125 return;
1129 /* Support special truncate insns for certain modes. */
1131 if (from_mode == DImode && to_mode == SImode)
1133 #ifdef HAVE_truncdisi2
1134 if (HAVE_truncdisi2)
1136 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1137 return;
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1144 if (from_mode == DImode && to_mode == HImode)
1146 #ifdef HAVE_truncdihi2
1147 if (HAVE_truncdihi2)
1149 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1150 return;
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1157 if (from_mode == DImode && to_mode == QImode)
1159 #ifdef HAVE_truncdiqi2
1160 if (HAVE_truncdiqi2)
1162 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1163 return;
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1170 if (from_mode == SImode && to_mode == HImode)
1172 #ifdef HAVE_truncsihi2
1173 if (HAVE_truncsihi2)
1175 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1176 return;
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1183 if (from_mode == SImode && to_mode == QImode)
1185 #ifdef HAVE_truncsiqi2
1186 if (HAVE_truncsiqi2)
1188 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1189 return;
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1196 if (from_mode == HImode && to_mode == QImode)
1198 #ifdef HAVE_trunchiqi2
1199 if (HAVE_trunchiqi2)
1201 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1202 return;
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1209 if (from_mode == TImode && to_mode == DImode)
1211 #ifdef HAVE_trunctidi2
1212 if (HAVE_trunctidi2)
1214 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1215 return;
1217 #endif
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 return;
1222 if (from_mode == TImode && to_mode == SImode)
1224 #ifdef HAVE_trunctisi2
1225 if (HAVE_trunctisi2)
1227 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1228 return;
1230 #endif
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 return;
1235 if (from_mode == TImode && to_mode == HImode)
1237 #ifdef HAVE_trunctihi2
1238 if (HAVE_trunctihi2)
1240 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1241 return;
1243 #endif
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 return;
1248 if (from_mode == TImode && to_mode == QImode)
1250 #ifdef HAVE_trunctiqi2
1251 if (HAVE_trunctiqi2)
1253 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1254 return;
1256 #endif
1257 convert_move (to, force_reg (from_mode, from), unsignedp);
1258 return;
1261 /* Handle truncation of volatile memrefs, and so on;
1262 the things that couldn't be truncated directly,
1263 and for which there was no special instruction. */
1264 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1266 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1267 emit_move_insn (to, temp);
1268 return;
1271 /* Mode combination is not recognized. */
1272 abort ();
1275 /* Return an rtx for a value that would result
1276 from converting X to mode MODE.
1277 Both X and MODE may be floating, or both integer.
1278 UNSIGNEDP is nonzero if X is an unsigned value.
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1282 This function *must not* call protect_from_queue
1283 except when putting X into an insn (in which case convert_move does it). */
1286 convert_to_mode (mode, x, unsignedp)
1287 enum machine_mode mode;
1288 rtx x;
1289 int unsignedp;
1291 return convert_modes (mode, VOIDmode, x, unsignedp);
1294 /* Return an rtx for a value that would result
1295 from converting X from mode OLDMODE to mode MODE.
1296 Both modes may be floating, or both integer.
1297 UNSIGNEDP is nonzero if X is an unsigned value.
1299 This can be done by referring to a part of X in place
1300 or by copying to a new temporary with conversion.
1302 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1304 This function *must not* call protect_from_queue
1305 except when putting X into an insn (in which case convert_move does it). */
1308 convert_modes (mode, oldmode, x, unsignedp)
1309 enum machine_mode mode, oldmode;
1310 rtx x;
1311 int unsignedp;
1313 register rtx temp;
1315 /* If FROM is a SUBREG that indicates that we have already done at least
1316 the required extension, strip it. */
1318 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1319 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1320 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1321 x = gen_lowpart (mode, x);
1323 if (GET_MODE (x) != VOIDmode)
1324 oldmode = GET_MODE (x);
1326 if (mode == oldmode)
1327 return x;
1329 /* There is one case that we must handle specially: If we are converting
1330 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
 1331    we are to interpret the constant as unsigned, gen_lowpart will do the
 1332    wrong thing if the constant appears negative.  What we want to do is
1333 make the high-order word of the constant zero, not all ones. */
1335 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1336 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1337 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1339 HOST_WIDE_INT val = INTVAL (x);
1341 if (oldmode != VOIDmode
1342 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1344 int width = GET_MODE_BITSIZE (oldmode);
1346 /* We need to zero extend VAL. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1350 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1353 /* We can do this with a gen_lowpart if both desired and current modes
1354 are integer, and this is either a constant integer, a register, or a
1355 non-volatile MEM. Except for the constant case where MODE is no
1356 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1358 if ((GET_CODE (x) == CONST_INT
1359 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1360 || (GET_MODE_CLASS (mode) == MODE_INT
1361 && GET_MODE_CLASS (oldmode) == MODE_INT
1362 && (GET_CODE (x) == CONST_DOUBLE
1363 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1364 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1365 && direct_load[(int) mode])
1366 || (GET_CODE (x) == REG
1367 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1368 GET_MODE_BITSIZE (GET_MODE (x)))))))))
 1370       /* ??? If we don't know OLDMODE, we have to assume here that
1371 X does not need sign- or zero-extension. This may not be
1372 the case, but it's the best we can do. */
1373 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1374 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1376 HOST_WIDE_INT val = INTVAL (x);
1377 int width = GET_MODE_BITSIZE (oldmode);
1379 /* We must sign or zero-extend in this case. Start by
1380 zero-extending, then sign extend if we need to. */
1381 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1382 if (! unsignedp
1383 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1384 val |= (HOST_WIDE_INT) (-1) << width;
1386 return GEN_INT (val);
1389 return gen_lowpart (mode, x);
1392 temp = gen_reg_rtx (mode);
1393 convert_move (temp, x, unsignedp);
1394 return temp;
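/* For instance, convert_to_mode (SImode, x, 1) returns an rtx for X converted
   to SImode, zero-extending (UNSIGNEDP nonzero) if widening is needed, either
   by reusing part of X in place or via a new pseudo and convert_move.  */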
1397 /* This macro is used to determine what the largest unit size that
1398 move_by_pieces can use is. */
1400 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1401 move efficiently, as opposed to MOVE_MAX which is the maximum
1402 number of bytes we can move with a single instruction. */
1404 #ifndef MOVE_MAX_PIECES
1405 #define MOVE_MAX_PIECES MOVE_MAX
1406 #endif
1408 /* Generate several move instructions to copy LEN bytes
1409 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1410 The caller must pass FROM and TO
1411 through protect_from_queue before calling.
1412 ALIGN is maximum alignment we can assume. */
1414 void
1415 move_by_pieces (to, from, len, align)
1416 rtx to, from;
1417 unsigned HOST_WIDE_INT len;
1418 unsigned int align;
1420 struct move_by_pieces data;
1421 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1422 unsigned int max_size = MOVE_MAX_PIECES + 1;
1423 enum machine_mode mode = VOIDmode, tmode;
1424 enum insn_code icode;
1426 data.offset = 0;
1427 data.to_addr = to_addr;
1428 data.from_addr = from_addr;
1429 data.to = to;
1430 data.from = from;
1431 data.autinc_to
1432 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1433 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1434 data.autinc_from
1435 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1436 || GET_CODE (from_addr) == POST_INC
1437 || GET_CODE (from_addr) == POST_DEC);
1439 data.explicit_inc_from = 0;
1440 data.explicit_inc_to = 0;
1441 data.reverse
1442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1443 if (data.reverse) data.offset = len;
1444 data.len = len;
1446 /* If copying requires more than two move insns,
1447 copy addresses to registers (to make displacements shorter)
1448 and use post-increment if available. */
1449 if (!(data.autinc_from && data.autinc_to)
1450 && move_by_pieces_ninsns (len, align) > 2)
1452 /* Find the mode of the largest move... */
1453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1455 if (GET_MODE_SIZE (tmode) < max_size)
1456 mode = tmode;
1458 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1460 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1461 data.autinc_from = 1;
1462 data.explicit_inc_from = -1;
1464 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1466 data.from_addr = copy_addr_to_reg (from_addr);
1467 data.autinc_from = 1;
1468 data.explicit_inc_from = 1;
1470 if (!data.autinc_from && CONSTANT_P (from_addr))
1471 data.from_addr = copy_addr_to_reg (from_addr);
1472 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1474 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1475 data.autinc_to = 1;
1476 data.explicit_inc_to = -1;
1478 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1480 data.to_addr = copy_addr_to_reg (to_addr);
1481 data.autinc_to = 1;
1482 data.explicit_inc_to = 1;
1484 if (!data.autinc_to && CONSTANT_P (to_addr))
1485 data.to_addr = copy_addr_to_reg (to_addr);
1488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1490 align = MOVE_MAX * BITS_PER_UNIT;
1492 /* First move what we can in the largest integer mode, then go to
1493 successively smaller modes. */
1495 while (max_size > 1)
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) < max_size)
1500 mode = tmode;
1502 if (mode == VOIDmode)
1503 break;
1505 icode = mov_optab->handlers[(int) mode].insn_code;
1506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1507 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1509 max_size = GET_MODE_SIZE (mode);
1512 /* The code above should have handled everything. */
1513 if (data.len > 0)
1514 abort ();
1517 /* Return number of insns required to move L bytes by pieces.
 1518    ALIGN (in bits) is the maximum alignment we can assume.  */
1520 static unsigned HOST_WIDE_INT
1521 move_by_pieces_ninsns (l, align)
1522 unsigned HOST_WIDE_INT l;
1523 unsigned int align;
1525 unsigned HOST_WIDE_INT n_insns = 0;
1526 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1528 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1529 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1530 align = MOVE_MAX * BITS_PER_UNIT;
1532 while (max_size > 1)
1534 enum machine_mode mode = VOIDmode, tmode;
1535 enum insn_code icode;
1537 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1538 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1539 if (GET_MODE_SIZE (tmode) < max_size)
1540 mode = tmode;
1542 if (mode == VOIDmode)
1543 break;
1545 icode = mov_optab->handlers[(int) mode].insn_code;
1546 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1547 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1549 max_size = GET_MODE_SIZE (mode);
1552 return n_insns;
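/* As an example, assuming MOVE_MAX is 4, full alignment, and mov patterns for
   QImode, HImode and SImode, moving 11 bytes costs 2 SImode + 1 HImode +
   1 QImode moves, so move_by_pieces_ninsns returns 4.  */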
1555 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1556 with move instructions for mode MODE. GENFUN is the gen_... function
1557 to make a move insn for that mode. DATA has all the other info. */
1559 static void
1560 move_by_pieces_1 (genfun, mode, data)
1561 rtx (*genfun) PARAMS ((rtx, ...));
1562 enum machine_mode mode;
1563 struct move_by_pieces *data;
1565 unsigned int size = GET_MODE_SIZE (mode);
1566 rtx to1, from1;
1568 while (data->len >= size)
1570 if (data->reverse)
1571 data->offset -= size;
1573 if (data->autinc_to)
1575 to1 = gen_rtx_MEM (mode, data->to_addr);
1576 MEM_COPY_ATTRIBUTES (to1, data->to);
1578 else
1579 to1 = change_address (data->to, mode,
1580 plus_constant (data->to_addr, data->offset));
1582 if (data->autinc_from)
1584 from1 = gen_rtx_MEM (mode, data->from_addr);
1585 MEM_COPY_ATTRIBUTES (from1, data->from);
1587 else
1588 from1 = change_address (data->from, mode,
1589 plus_constant (data->from_addr, data->offset));
1591 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1592 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1593 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1594 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1596 emit_insn ((*genfun) (to1, from1));
1598 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1599 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1600 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1601 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1603 if (! data->reverse)
1604 data->offset += size;
1606 data->len -= size;
1610 /* Emit code to move a block Y to a block X.
1611 This may be done with string-move instructions,
1612 with multiple scalar move instructions, or with a library call.
1614 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1615 with mode BLKmode.
1616 SIZE is an rtx that says how long they are.
1617 ALIGN is the maximum alignment we can assume they have.
1619 Return the address of the new block, if memcpy is called and returns it,
1620 0 otherwise. */
1623 emit_block_move (x, y, size, align)
1624 rtx x, y;
1625 rtx size;
1626 unsigned int align;
1628 rtx retval = 0;
1629 #ifdef TARGET_MEM_FUNCTIONS
1630 static tree fn;
1631 tree call_expr, arg_list;
1632 #endif
1634 if (GET_MODE (x) != BLKmode)
1635 abort ();
1637 if (GET_MODE (y) != BLKmode)
1638 abort ();
1640 x = protect_from_queue (x, 1);
1641 y = protect_from_queue (y, 0);
1642 size = protect_from_queue (size, 0);
1644 if (GET_CODE (x) != MEM)
1645 abort ();
1646 if (GET_CODE (y) != MEM)
1647 abort ();
1648 if (size == 0)
1649 abort ();
1651 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1652 move_by_pieces (x, y, INTVAL (size), align);
1653 else
1655 /* Try the most limited insn first, because there's no point
1656 including more than one in the machine description unless
1657 the more limited one has some advantage. */
1659 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1660 enum machine_mode mode;
1662 /* Since this is a move insn, we don't care about volatility. */
1663 volatile_ok = 1;
1665 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1666 mode = GET_MODE_WIDER_MODE (mode))
1668 enum insn_code code = movstr_optab[(int) mode];
1669 insn_operand_predicate_fn pred;
1671 if (code != CODE_FOR_nothing
1672 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1673 here because if SIZE is less than the mode mask, as it is
1674 returned by the macro, it will definitely be less than the
1675 actual mode mask. */
1676 && ((GET_CODE (size) == CONST_INT
1677 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1678 <= (GET_MODE_MASK (mode) >> 1)))
1679 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1680 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1681 || (*pred) (x, BLKmode))
1682 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1683 || (*pred) (y, BLKmode))
1684 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1685 || (*pred) (opalign, VOIDmode)))
1687 rtx op2;
1688 rtx last = get_last_insn ();
1689 rtx pat;
1691 op2 = convert_to_mode (mode, size, 1);
1692 pred = insn_data[(int) code].operand[2].predicate;
1693 if (pred != 0 && ! (*pred) (op2, mode))
1694 op2 = copy_to_mode_reg (mode, op2);
1696 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1697 if (pat)
1699 emit_insn (pat);
1700 volatile_ok = 0;
1701 return 0;
1703 else
1704 delete_insns_since (last);
1708 volatile_ok = 0;
1710 /* X, Y, or SIZE may have been passed through protect_from_queue.
1712 It is unsafe to save the value generated by protect_from_queue
1713 and reuse it later. Consider what happens if emit_queue is
1714 called before the return value from protect_from_queue is used.
1716 Expansion of the CALL_EXPR below will call emit_queue before
1717 we are finished emitting RTL for argument setup. So if we are
1718 not careful we could get the wrong value for an argument.
1720 To avoid this problem we go ahead and emit code to copy X, Y &
1721 SIZE into new pseudos. We can then place those new pseudos
1722 into an RTL_EXPR and use them later, even after a call to
1723 emit_queue.
1725 Note this is not strictly needed for library calls since they
1726 do not call emit_queue before loading their arguments. However,
1727 we may need to have library calls call emit_queue in the future
1728 since failing to do so could cause problems for targets which
1729 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1730 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1731 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1733 #ifdef TARGET_MEM_FUNCTIONS
1734 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1735 #else
1736 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1737 TREE_UNSIGNED (integer_type_node));
1738 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1739 #endif
1741 #ifdef TARGET_MEM_FUNCTIONS
1742 /* It is incorrect to use the libcall calling conventions to call
1743 memcpy in this context.
1745 This could be a user call to memcpy and the user may wish to
1746 examine the return value from memcpy.
1748 For targets where libcalls and normal calls have different conventions
1749 for returning pointers, we could end up generating incorrect code.
1751 So instead of using a libcall sequence we build up a suitable
1752 CALL_EXPR and expand the call in the normal fashion. */
1753 if (fn == NULL_TREE)
1755 tree fntype;
1757 /* This was copied from except.c, I don't know if all this is
1758 necessary in this context or not. */
1759 fn = get_identifier ("memcpy");
1760 push_obstacks_nochange ();
1761 end_temporary_allocation ();
1762 fntype = build_pointer_type (void_type_node);
1763 fntype = build_function_type (fntype, NULL_TREE);
1764 fn = build_decl (FUNCTION_DECL, fn, fntype);
1765 ggc_add_tree_root (&fn, 1);
1766 DECL_EXTERNAL (fn) = 1;
1767 TREE_PUBLIC (fn) = 1;
1768 DECL_ARTIFICIAL (fn) = 1;
1769 make_decl_rtl (fn, NULL_PTR, 1);
1770 assemble_external (fn);
1771 pop_obstacks ();
1774 /* We need to make an argument list for the function call.
 1776    memcpy has three arguments: the first two are void * addresses and
1777 the last is a size_t byte count for the copy. */
1778 arg_list
1779 = build_tree_list (NULL_TREE,
1780 make_tree (build_pointer_type (void_type_node), x));
1781 TREE_CHAIN (arg_list)
1782 = build_tree_list (NULL_TREE,
1783 make_tree (build_pointer_type (void_type_node), y));
1784 TREE_CHAIN (TREE_CHAIN (arg_list))
1785 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1786 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1788 /* Now we have to build up the CALL_EXPR itself. */
1789 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1790 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1791 call_expr, arg_list, NULL_TREE);
1792 TREE_SIDE_EFFECTS (call_expr) = 1;
1794 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1795 #else
1796 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1797 VOIDmode, 3, y, Pmode, x, Pmode,
1798 convert_to_mode (TYPE_MODE (integer_type_node), size,
1799 TREE_UNSIGNED (integer_type_node)),
1800 TYPE_MODE (integer_type_node));
1801 #endif
1804 return retval;
1807 /* Copy all or part of a value X into registers starting at REGNO.
1808 The number of registers to be filled is NREGS. */
1810 void
1811 move_block_to_reg (regno, x, nregs, mode)
1812 int regno;
1813 rtx x;
1814 int nregs;
1815 enum machine_mode mode;
1817 int i;
1818 #ifdef HAVE_load_multiple
1819 rtx pat;
1820 rtx last;
1821 #endif
1823 if (nregs == 0)
1824 return;
1826 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1827 x = validize_mem (force_const_mem (mode, x));
1829 /* See if the machine can do this with a load multiple insn. */
1830 #ifdef HAVE_load_multiple
1831 if (HAVE_load_multiple)
1833 last = get_last_insn ();
1834 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1835 GEN_INT (nregs));
1836 if (pat)
1838 emit_insn (pat);
1839 return;
1841 else
1842 delete_insns_since (last);
1844 #endif
1846 for (i = 0; i < nregs; i++)
1847 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1848 operand_subword_force (x, i, mode));
1851 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1852 The number of registers to be filled is NREGS. SIZE indicates the number
1853 of bytes in the object X. */
1855 void
1856 move_block_from_reg (regno, x, nregs, size)
1857 int regno;
1858 rtx x;
1859 int nregs;
1860 int size;
1862 int i;
1863 #ifdef HAVE_store_multiple
1864 rtx pat;
1865 rtx last;
1866 #endif
1867 enum machine_mode mode;
1869 /* If SIZE is that of a mode no bigger than a word, just use that
1870 mode's store operation. */
1871 if (size <= UNITS_PER_WORD
1872 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1874 emit_move_insn (change_address (x, mode, NULL),
1875 gen_rtx_REG (mode, regno));
1876 return;
1879 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1880 to the left before storing to memory. Note that the previous test
1881 doesn't handle all cases (e.g. SIZE == 3). */
1882 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1884 rtx tem = operand_subword (x, 0, 1, BLKmode);
1885 rtx shift;
1887 if (tem == 0)
1888 abort ();
1890 shift = expand_shift (LSHIFT_EXPR, word_mode,
1891 gen_rtx_REG (word_mode, regno),
1892 build_int_2 ((UNITS_PER_WORD - size)
1893 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1894 emit_move_insn (tem, shift);
1895 return;
1898 /* See if the machine can do this with a store multiple insn. */
1899 #ifdef HAVE_store_multiple
1900 if (HAVE_store_multiple)
1902 last = get_last_insn ();
1903 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1904 GEN_INT (nregs));
1905 if (pat)
1907 emit_insn (pat);
1908 return;
1910 else
1911 delete_insns_since (last);
1913 #endif
1915 for (i = 0; i < nregs; i++)
1917 rtx tem = operand_subword (x, i, 1, BLKmode);
1919 if (tem == 0)
1920 abort ();
1922 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1926 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1927 registers represented by a PARALLEL. SSIZE represents the total size of
1928 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1929 SRC in bits. */
 1930 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1931 the balance will be in what would be the low-order memory addresses, i.e.
1932 left justified for big endian, right justified for little endian. This
1933 happens to be true for the targets currently using this support. If this
1934 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1935 would be needed. */
1937 void
1938 emit_group_load (dst, orig_src, ssize, align)
1939 rtx dst, orig_src;
1940 unsigned int align;
1941 int ssize;
1943 rtx *tmps, src;
1944 int start, i;
1946 if (GET_CODE (dst) != PARALLEL)
1947 abort ();
1949 /* Check for a NULL entry, used to indicate that the parameter goes
1950 both on the stack and in registers. */
1951 if (XEXP (XVECEXP (dst, 0, 0), 0))
1952 start = 0;
1953 else
1954 start = 1;
1956 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1958 /* If we won't be loading directly from memory, protect the real source
1959 from strange tricks we might play. */
1960 src = orig_src;
1961 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1963 if (GET_MODE (src) == VOIDmode)
1964 src = gen_reg_rtx (GET_MODE (dst));
1965 else
1966 src = gen_reg_rtx (GET_MODE (orig_src));
1967 emit_move_insn (src, orig_src);
1970 /* Process the pieces. */
1971 for (i = start; i < XVECLEN (dst, 0); i++)
1973 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1974 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1975 unsigned int bytelen = GET_MODE_SIZE (mode);
1976 int shift = 0;
1978 /* Handle trailing fragments that run over the size of the struct. */
1979 if (ssize >= 0 && bytepos + bytelen > ssize)
1981 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1982 bytelen = ssize - bytepos;
1983 if (bytelen <= 0)
1984 abort ();
1987 /* Optimize the access just a bit. */
1988 if (GET_CODE (src) == MEM
1989 && align >= GET_MODE_ALIGNMENT (mode)
1990 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1991 && bytelen == GET_MODE_SIZE (mode))
1993 tmps[i] = gen_reg_rtx (mode);
1994 emit_move_insn (tmps[i],
1995 change_address (src, mode,
1996 plus_constant (XEXP (src, 0),
1997 bytepos)));
1999 else if (GET_CODE (src) == CONCAT)
2001 if (bytepos == 0
2002 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2003 tmps[i] = XEXP (src, 0);
2004 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2005 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2006 tmps[i] = XEXP (src, 1);
2007 else
2008 abort ();
2010 else if ((CONSTANT_P (src)
2011 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2012 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2013 tmps[i] = src;
2014 else
2015 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2016 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2017 mode, mode, align, ssize);
2019 if (BYTES_BIG_ENDIAN && shift)
2020 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2021 tmps[i], 0, OPTAB_WIDEN);
2024 emit_queue ();
2026 /* Copy the extracted pieces into the proper (probable) hard regs. */
2027 for (i = start; i < XVECLEN (dst, 0); i++)
2028 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
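/* Illustrative sketch, not part of the compiler and kept out of the
   build with #if 0: one way a caller might describe a 16-byte value
   split across two hypothetical 8-byte hard registers 4 and 5, and then
   load it from a BLKmode MEM with emit_group_load.  The register
   numbers, modes, size and 64-bit alignment are assumptions made up
   for the example.  */
#if 0
static void
example_group_load (mem)
     rtx mem;
{
  rtx par
    = gen_rtx_PARALLEL (BLKmode,
                        gen_rtvec (2,
                                   gen_rtx_EXPR_LIST (VOIDmode,
                                                      gen_rtx_REG (DImode, 4),
                                                      const0_rtx),
                                   gen_rtx_EXPR_LIST (VOIDmode,
                                                      gen_rtx_REG (DImode, 5),
                                                      GEN_INT (8))));

  emit_group_load (par, mem, 16, 64);
}
#endif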
2031 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2032 registers represented by a PARALLEL. SSIZE represents the total size of
2033 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2035 void
2036 emit_group_store (orig_dst, src, ssize, align)
2037 rtx orig_dst, src;
2038 int ssize;
2039 unsigned int align;
2041 rtx *tmps, dst;
2042 int start, i;
2044 if (GET_CODE (src) != PARALLEL)
2045 abort ();
2047 /* Check for a NULL entry, used to indicate that the parameter goes
2048 both on the stack and in registers. */
2049 if (XEXP (XVECEXP (src, 0, 0), 0))
2050 start = 0;
2051 else
2052 start = 1;
2054 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2056 /* Copy the (probable) hard regs into pseudos. */
2057 for (i = start; i < XVECLEN (src, 0); i++)
2059 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2060 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2061 emit_move_insn (tmps[i], reg);
2063 emit_queue ();
2065 /* If we won't be storing directly into memory, protect the real destination
2066 from strange tricks we might play. */
2067 dst = orig_dst;
2068 if (GET_CODE (dst) == PARALLEL)
2070 rtx temp;
2072 /* We can get a PARALLEL dst if there is a conditional expression in
2073 a return statement. In that case, the dst and src are the same,
2074 so no action is necessary. */
2075 if (rtx_equal_p (dst, src))
2076 return;
2078 /* It is unclear if we can ever reach here, but we may as well handle
2079 it. Allocate a temporary, and split this into a store/load to/from
2080 the temporary. */
2082 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2083 emit_group_store (temp, src, ssize, align);
2084 emit_group_load (dst, temp, ssize, align);
2085 return;
2087 else if (GET_CODE (dst) != MEM)
2089 dst = gen_reg_rtx (GET_MODE (orig_dst));
2090 /* Make life a bit easier for combine. */
2091 emit_move_insn (dst, const0_rtx);
2094 /* Process the pieces. */
2095 for (i = start; i < XVECLEN (src, 0); i++)
2097 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2098 enum machine_mode mode = GET_MODE (tmps[i]);
2099 unsigned int bytelen = GET_MODE_SIZE (mode);
2101 /* Handle trailing fragments that run over the size of the struct. */
2102 if (ssize >= 0 && bytepos + bytelen > ssize)
2104 if (BYTES_BIG_ENDIAN)
2106 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2107 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2108 tmps[i], 0, OPTAB_WIDEN);
2110 bytelen = ssize - bytepos;
2113 /* Optimize the access just a bit. */
2114 if (GET_CODE (dst) == MEM
2115 && align >= GET_MODE_ALIGNMENT (mode)
2116 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2117 && bytelen == GET_MODE_SIZE (mode))
2118 emit_move_insn (change_address (dst, mode,
2119 plus_constant (XEXP (dst, 0),
2120 bytepos)),
2121 tmps[i]);
2122 else
2123 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2124 mode, tmps[i], align, ssize);
2127 emit_queue ();
2129 /* Copy from the pseudo into the (probable) hard reg. */
2130 if (GET_CODE (dst) == REG)
2131 emit_move_insn (orig_dst, dst);
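/* Illustrative sketch, not compiled (#if 0): spilling a value that
   lives in a PARALLEL of registers (as built in the sketch after
   emit_group_load above) into a fresh BLKmode stack slot.  The 16-byte
   size and 64-bit alignment are again assumptions.  */
#if 0
static rtx
example_group_store (par)
     rtx par;
{
  rtx slot = assign_stack_temp (BLKmode, 16, 0);

  emit_group_store (slot, par, 16, 64);
  return slot;
}
#endif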
2134 /* Generate code to copy a BLKmode object of TYPE out of a
2135 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2136 is null, a stack temporary is created. TGTBLK is returned.
2138 The primary purpose of this routine is to handle functions
2139 that return BLKmode structures in registers. Some machines
2140 (the PA for example) want to return all small structures
2141 in registers regardless of the structure's alignment. */
2143 rtx
2144 copy_blkmode_from_reg (tgtblk, srcreg, type)
2145 rtx tgtblk;
2146 rtx srcreg;
2147 tree type;
2149 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2150 rtx src = NULL, dst = NULL;
2151 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2152 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2154 if (tgtblk == 0)
2156 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2157 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2158 preserve_temp_slots (tgtblk);
2161 /* This code assumes srcreg is at least a full word. If it isn't,
2162 copy it into a new pseudo which is a full word. */
2163 if (GET_MODE (srcreg) != BLKmode
2164 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2165 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2167 /* Structures whose size is not a multiple of a word are aligned
2168 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2169 machine, this means we must skip the empty high order bytes when
2170 calculating the bit offset. */
2171 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2172 big_endian_correction
2173 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
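  /* Worked example of the correction, assuming 32-bit words
     (UNITS_PER_WORD == 4, BITS_PER_UNIT == 8): for a 3-byte structure,
     bytes % UNITS_PER_WORD == 3, so big_endian_correction
     == 32 - 3 * 8 == 8, and extraction starts 8 bits into the first
     source word, skipping the unused high-order byte.  */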
2175 /* Copy the structure BITSIZE bits at a time.
2177 We could probably emit more efficient code for machines which do not use
2178 strict alignment, but it doesn't seem worth the effort at the current
2179 time. */
2180 for (bitpos = 0, xbitpos = big_endian_correction;
2181 bitpos < bytes * BITS_PER_UNIT;
2182 bitpos += bitsize, xbitpos += bitsize)
2184 /* We need a new source operand each time xbitpos is on a
2185 word boundary and when xbitpos == big_endian_correction
2186 (the first time through). */
2187 if (xbitpos % BITS_PER_WORD == 0
2188 || xbitpos == big_endian_correction)
2189 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2191 /* We need a new destination operand each time bitpos is on
2192 a word boundary. */
2193 if (bitpos % BITS_PER_WORD == 0)
2194 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2196 /* Use xbitpos for the source extraction (right justified) and
2197 bitpos for the destination store (left justified). */
2198 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2199 extract_bit_field (src, bitsize,
2200 xbitpos % BITS_PER_WORD, 1,
2201 NULL_RTX, word_mode, word_mode,
2202 bitsize, BITS_PER_WORD),
2203 bitsize, BITS_PER_WORD);
2206 return tgtblk;
2209 /* Add a USE expression for REG to the (possibly empty) list pointed
2210 to by CALL_FUSAGE. REG must denote a hard register. */
2212 void
2213 use_reg (call_fusage, reg)
2214 rtx *call_fusage, reg;
2216 if (GET_CODE (reg) != REG
2217 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2218 abort ();
2220 *call_fusage
2221 = gen_rtx_EXPR_LIST (VOIDmode,
2222 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2225 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2226 starting at REGNO. All of these registers must be hard registers. */
2228 void
2229 use_regs (call_fusage, regno, nregs)
2230 rtx *call_fusage;
2231 int regno;
2232 int nregs;
2234 int i;
2236 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2237 abort ();
2239 for (i = 0; i < nregs; i++)
2240 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2243 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2244 PARALLEL REGS. This is for calls that pass values in multiple
2245 non-contiguous locations. The Irix 6 ABI has examples of this. */
2247 void
2248 use_group_regs (call_fusage, regs)
2249 rtx *call_fusage;
2250 rtx regs;
2252 int i;
2254 for (i = 0; i < XVECLEN (regs, 0); i++)
2256 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2258 /* A NULL entry means the parameter goes both on the stack and in
2259 registers. This can also be a MEM for targets that pass values
2260 partially on the stack and partially in registers. */
2261 if (reg != 0 && GET_CODE (reg) == REG)
2262 use_reg (call_fusage, reg);
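/* Illustrative sketch, not compiled (#if 0): recording that a call
   reads its argument registers, either as a run of hard registers via
   use_regs or from a PARALLEL via use_group_regs.  The register number
   4 and the count of 2 are assumptions for the example.  */
#if 0
static void
example_call_fusage (fusage, par)
     rtx *fusage;
     rtx par;
{
  use_regs (fusage, 4, 2);        /* Hard regs 4 and 5 hold arguments.  */
  use_group_regs (fusage, par);   /* Or the regs named in a PARALLEL.  */
}
#endif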
2266 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2267 rtx with BLKmode). The caller must pass TO through protect_from_queue
2268 before calling. ALIGN is maximum alignment we can assume. */
2270 static void
2271 clear_by_pieces (to, len, align)
2272 rtx to;
2273 unsigned HOST_WIDE_INT len;
2274 unsigned int align;
2276 struct clear_by_pieces data;
2277 rtx to_addr = XEXP (to, 0);
2278 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2279 enum machine_mode mode = VOIDmode, tmode;
2280 enum insn_code icode;
2282 data.offset = 0;
2283 data.to_addr = to_addr;
2284 data.to = to;
2285 data.autinc_to
2286 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2287 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2289 data.explicit_inc_to = 0;
2290 data.reverse
2291 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2292 if (data.reverse)
2293 data.offset = len;
2294 data.len = len;
2296 /* If copying requires more than two move insns,
2297 copy addresses to registers (to make displacements shorter)
2298 and use post-increment if available. */
2299 if (!data.autinc_to
2300 && move_by_pieces_ninsns (len, align) > 2)
2302 /* Determine the main mode we'll be using. */
2303 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2304 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2305 if (GET_MODE_SIZE (tmode) < max_size)
2306 mode = tmode;
2308 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2310 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2311 data.autinc_to = 1;
2312 data.explicit_inc_to = -1;
2315 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2316 && ! data.autinc_to)
2318 data.to_addr = copy_addr_to_reg (to_addr);
2319 data.autinc_to = 1;
2320 data.explicit_inc_to = 1;
2323 if ( !data.autinc_to && CONSTANT_P (to_addr))
2324 data.to_addr = copy_addr_to_reg (to_addr);
2327 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2328 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2329 align = MOVE_MAX * BITS_PER_UNIT;
2331 /* First move what we can in the largest integer mode, then go to
2332 successively smaller modes. */
2334 while (max_size > 1)
2336 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2337 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2338 if (GET_MODE_SIZE (tmode) < max_size)
2339 mode = tmode;
2341 if (mode == VOIDmode)
2342 break;
2344 icode = mov_optab->handlers[(int) mode].insn_code;
2345 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2346 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2348 max_size = GET_MODE_SIZE (mode);
2351 /* The code above should have handled everything. */
2352 if (data.len != 0)
2353 abort ();
2356 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2357 with move instructions for mode MODE. GENFUN is the gen_... function
2358 to make a move insn for that mode. DATA has all the other info. */
2360 static void
2361 clear_by_pieces_1 (genfun, mode, data)
2362 rtx (*genfun) PARAMS ((rtx, ...));
2363 enum machine_mode mode;
2364 struct clear_by_pieces *data;
2366 unsigned int size = GET_MODE_SIZE (mode);
2367 rtx to1;
2369 while (data->len >= size)
2371 if (data->reverse)
2372 data->offset -= size;
2374 if (data->autinc_to)
2376 to1 = gen_rtx_MEM (mode, data->to_addr);
2377 MEM_COPY_ATTRIBUTES (to1, data->to);
2379 else
2380 to1 = change_address (data->to, mode,
2381 plus_constant (data->to_addr, data->offset));
2383 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2384 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2386 emit_insn ((*genfun) (to1, const0_rtx));
2388 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2389 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2391 if (! data->reverse)
2392 data->offset += size;
2394 data->len -= size;
2398 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2399 its length in bytes and ALIGN is the maximum alignment we can assume.
2401 If we call a function that returns the length of the block, return it. */
2403 rtx
2404 clear_storage (object, size, align)
2405 rtx object;
2406 rtx size;
2407 unsigned int align;
2409 #ifdef TARGET_MEM_FUNCTIONS
2410 static tree fn;
2411 tree call_expr, arg_list;
2412 #endif
2413 rtx retval = 0;
2415 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2416 just move a zero. Otherwise, do this a piece at a time. */
2417 if (GET_MODE (object) != BLKmode
2418 && GET_CODE (size) == CONST_INT
2419 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2420 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2421 else
2423 object = protect_from_queue (object, 1);
2424 size = protect_from_queue (size, 0);
2426 if (GET_CODE (size) == CONST_INT
2427 && MOVE_BY_PIECES_P (INTVAL (size), align))
2428 clear_by_pieces (object, INTVAL (size), align);
2429 else
2431 /* Try the most limited insn first, because there's no point
2432 including more than one in the machine description unless
2433 the more limited one has some advantage. */
2435 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2436 enum machine_mode mode;
2438 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2439 mode = GET_MODE_WIDER_MODE (mode))
2441 enum insn_code code = clrstr_optab[(int) mode];
2442 insn_operand_predicate_fn pred;
2444 if (code != CODE_FOR_nothing
2445 /* We don't need MODE to be narrower than
2446 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2447 the mode mask, as it is returned by the macro, it will
2448 definitely be less than the actual mode mask. */
2449 && ((GET_CODE (size) == CONST_INT
2450 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2451 <= (GET_MODE_MASK (mode) >> 1)))
2452 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2453 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2454 || (*pred) (object, BLKmode))
2455 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2456 || (*pred) (opalign, VOIDmode)))
2458 rtx op1;
2459 rtx last = get_last_insn ();
2460 rtx pat;
2462 op1 = convert_to_mode (mode, size, 1);
2463 pred = insn_data[(int) code].operand[1].predicate;
2464 if (pred != 0 && ! (*pred) (op1, mode))
2465 op1 = copy_to_mode_reg (mode, op1);
2467 pat = GEN_FCN ((int) code) (object, op1, opalign);
2468 if (pat)
2470 emit_insn (pat);
2471 return 0;
2473 else
2474 delete_insns_since (last);
2478 /* OBJECT or SIZE may have been passed through protect_from_queue.
2480 It is unsafe to save the value generated by protect_from_queue
2481 and reuse it later. Consider what happens if emit_queue is
2482 called before the return value from protect_from_queue is used.
2484 Expansion of the CALL_EXPR below will call emit_queue before
2485 we are finished emitting RTL for argument setup. So if we are
2486 not careful we could get the wrong value for an argument.
2488 To avoid this problem we go ahead and emit code to copy OBJECT
2489 and SIZE into new pseudos. We can then place those new pseudos
2490 into an RTL_EXPR and use them later, even after a call to
2491 emit_queue.
2493 Note this is not strictly needed for library calls since they
2494 do not call emit_queue before loading their arguments. However,
2495 we may need to have library calls call emit_queue in the future
2496 since failing to do so could cause problems for targets which
2497 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2498 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2500 #ifdef TARGET_MEM_FUNCTIONS
2501 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2502 #else
2503 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2504 TREE_UNSIGNED (integer_type_node));
2505 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2506 #endif
2508 #ifdef TARGET_MEM_FUNCTIONS
2509 /* It is incorrect to use the libcall calling conventions to call
2510 memset in this context.
2512 This could be a user call to memset and the user may wish to
2513 examine the return value from memset.
2515 For targets where libcalls and normal calls have different
2516 conventions for returning pointers, we could end up generating
2517 incorrect code.
2519 So instead of using a libcall sequence we build up a suitable
2520 CALL_EXPR and expand the call in the normal fashion. */
2521 if (fn == NULL_TREE)
2523 tree fntype;
2525 /* This was copied from except.c, I don't know if all this is
2526 necessary in this context or not. */
2527 fn = get_identifier ("memset");
2528 push_obstacks_nochange ();
2529 end_temporary_allocation ();
2530 fntype = build_pointer_type (void_type_node);
2531 fntype = build_function_type (fntype, NULL_TREE);
2532 fn = build_decl (FUNCTION_DECL, fn, fntype);
2533 ggc_add_tree_root (&fn, 1);
2534 DECL_EXTERNAL (fn) = 1;
2535 TREE_PUBLIC (fn) = 1;
2536 DECL_ARTIFICIAL (fn) = 1;
2537 make_decl_rtl (fn, NULL_PTR, 1);
2538 assemble_external (fn);
2539 pop_obstacks ();
2542 /* We need to make an argument list for the function call.
2544 memset has three arguments, the first is a void * address, the
2545 second an integer with the initialization value, and the last a
2546 size_t byte count for the copy. */
2547 arg_list
2548 = build_tree_list (NULL_TREE,
2549 make_tree (build_pointer_type (void_type_node),
2550 object));
2551 TREE_CHAIN (arg_list)
2552 = build_tree_list (NULL_TREE,
2553 make_tree (integer_type_node, const0_rtx));
2554 TREE_CHAIN (TREE_CHAIN (arg_list))
2555 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2556 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2558 /* Now we have to build up the CALL_EXPR itself. */
2559 call_expr = build1 (ADDR_EXPR,
2560 build_pointer_type (TREE_TYPE (fn)), fn);
2561 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2562 call_expr, arg_list, NULL_TREE);
2563 TREE_SIDE_EFFECTS (call_expr) = 1;
2565 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2566 #else
2567 emit_library_call (bzero_libfunc, LCT_NORMAL,
2568 VOIDmode, 2, object, Pmode, size,
2569 TYPE_MODE (integer_type_node));
2570 #endif
2574 return retval;
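/* Illustrative sketch, not compiled (#if 0): zeroing a 32-byte BLKmode
   object whose address is already in ADDR, assuming 64-bit alignment.
   The size and alignment are made up for the example.  */
#if 0
static void
example_clear (addr)
     rtx addr;
{
  rtx mem = gen_rtx_MEM (BLKmode, addr);

  clear_storage (mem, GEN_INT (32), 64);
}
#endif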
2577 /* Generate code to copy Y into X.
2578 Both Y and X must have the same mode, except that
2579 Y can be a constant with VOIDmode.
2580 This mode cannot be BLKmode; use emit_block_move for that.
2582 Return the last instruction emitted. */
2584 rtx
2585 emit_move_insn (x, y)
2586 rtx x, y;
2588 enum machine_mode mode = GET_MODE (x);
2590 x = protect_from_queue (x, 1);
2591 y = protect_from_queue (y, 0);
2593 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2594 abort ();
2596 /* Never force constant_p_rtx to memory. */
2597 if (GET_CODE (y) == CONSTANT_P_RTX)
2599 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2600 y = force_const_mem (mode, y);
2602 /* If X or Y are memory references, verify that their addresses are valid
2603 for the machine. */
2604 if (GET_CODE (x) == MEM
2605 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2606 && ! push_operand (x, GET_MODE (x)))
2607 || (flag_force_addr
2608 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2609 x = change_address (x, VOIDmode, XEXP (x, 0));
2611 if (GET_CODE (y) == MEM
2612 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2613 || (flag_force_addr
2614 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2615 y = change_address (y, VOIDmode, XEXP (y, 0));
2617 if (mode == BLKmode)
2618 abort ();
2620 return emit_move_insn_1 (x, y);
2623 /* Low level part of emit_move_insn.
2624 Called just like emit_move_insn, but assumes X and Y
2625 are basically valid. */
2627 rtx
2628 emit_move_insn_1 (x, y)
2629 rtx x, y;
2631 enum machine_mode mode = GET_MODE (x);
2632 enum machine_mode submode;
2633 enum mode_class class = GET_MODE_CLASS (mode);
2634 unsigned int i;
2636 if (mode >= MAX_MACHINE_MODE)
2637 abort ();
2639 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2640 return
2641 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2643 /* Expand complex moves by moving real part and imag part, if possible. */
2644 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2645 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2646 * BITS_PER_UNIT),
2647 (class == MODE_COMPLEX_INT
2648 ? MODE_INT : MODE_FLOAT),
2649 0))
2650 && (mov_optab->handlers[(int) submode].insn_code
2651 != CODE_FOR_nothing))
2653 /* Don't split destination if it is a stack push. */
2654 int stack = push_operand (x, GET_MODE (x));
2656 /* If this is a stack, push the highpart first, so it
2657 will be in the argument order.
2659 In that case, change_address is used only to convert
2660 the mode, not to change the address. */
2661 if (stack)
2663 /* Note that the real part always precedes the imag part in memory
2664 regardless of machine's endianness. */
2665 #ifdef STACK_GROWS_DOWNWARD
2666 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2667 (gen_rtx_MEM (submode, XEXP (x, 0)),
2668 gen_imagpart (submode, y)));
2669 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2670 (gen_rtx_MEM (submode, XEXP (x, 0)),
2671 gen_realpart (submode, y)));
2672 #else
2673 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2674 (gen_rtx_MEM (submode, XEXP (x, 0)),
2675 gen_realpart (submode, y)));
2676 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2677 (gen_rtx_MEM (submode, XEXP (x, 0)),
2678 gen_imagpart (submode, y)));
2679 #endif
2681 else
2683 rtx realpart_x, realpart_y;
2684 rtx imagpart_x, imagpart_y;
2686 /* If this is a complex value with each part being smaller than a
2687 word, the usual calling sequence will likely pack the pieces into
2688 a single register. Unfortunately, SUBREG of hard registers only
2689 deals in terms of words, so we have a problem converting input
2690 arguments to the CONCAT of two registers that is used elsewhere
2691 for complex values. If this is before reload, we can copy it into
2692 memory and reload. FIXME, we should see about using extract and
2693 insert on integer registers, but complex short and complex char
2694 variables should be rarely used. */
2695 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2696 && (reload_in_progress | reload_completed) == 0)
2698 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2699 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2701 if (packed_dest_p || packed_src_p)
2703 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2704 ? MODE_FLOAT : MODE_INT);
2706 enum machine_mode reg_mode =
2707 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2709 if (reg_mode != BLKmode)
2711 rtx mem = assign_stack_temp (reg_mode,
2712 GET_MODE_SIZE (mode), 0);
2714 rtx cmem = change_address (mem, mode, NULL_RTX);
2716 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2718 if (packed_dest_p)
2720 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2721 emit_move_insn_1 (cmem, y);
2722 return emit_move_insn_1 (sreg, mem);
2724 else
2726 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2727 emit_move_insn_1 (mem, sreg);
2728 return emit_move_insn_1 (x, cmem);
2734 realpart_x = gen_realpart (submode, x);
2735 realpart_y = gen_realpart (submode, y);
2736 imagpart_x = gen_imagpart (submode, x);
2737 imagpart_y = gen_imagpart (submode, y);
2739 /* Show the output dies here. This is necessary for SUBREGs
2740 of pseudos since we cannot track their lifetimes correctly;
2741 hard regs shouldn't appear here except as return values.
2742 We never want to emit such a clobber after reload. */
2743 if (x != y
2744 && ! (reload_in_progress || reload_completed)
2745 && (GET_CODE (realpart_x) == SUBREG
2746 || GET_CODE (imagpart_x) == SUBREG))
2748 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2751 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2752 (realpart_x, realpart_y));
2753 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2754 (imagpart_x, imagpart_y));
2757 return get_last_insn ();
2760 /* This will handle any multi-word mode that lacks a move_insn pattern.
2761 However, you will get better code if you define such patterns,
2762 even if they must turn into multiple assembler instructions. */
2763 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2765 rtx last_insn = 0;
2766 rtx seq, inner;
2767 int need_clobber;
2769 #ifdef PUSH_ROUNDING
2771 /* If X is a push on the stack, do the push now and replace
2772 X with a reference to the stack pointer. */
2773 if (push_operand (x, GET_MODE (x)))
2775 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2776 x = change_address (x, VOIDmode, stack_pointer_rtx);
2778 #endif
2780 /* If we are in reload, see if either operand is a MEM whose address
2781 is scheduled for replacement. */
2782 if (reload_in_progress && GET_CODE (x) == MEM
2783 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2785 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2787 MEM_COPY_ATTRIBUTES (new, x);
2788 x = new;
2790 if (reload_in_progress && GET_CODE (y) == MEM
2791 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2793 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2795 MEM_COPY_ATTRIBUTES (new, y);
2796 y = new;
2799 start_sequence ();
2801 need_clobber = 0;
2802 for (i = 0;
2803 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2804 i++)
2806 rtx xpart = operand_subword (x, i, 1, mode);
2807 rtx ypart = operand_subword (y, i, 1, mode);
2809 /* If we can't get a part of Y, put Y into memory if it is a
2810 constant. Otherwise, force it into a register. If we still
2811 can't get a part of Y, abort. */
2812 if (ypart == 0 && CONSTANT_P (y))
2814 y = force_const_mem (mode, y);
2815 ypart = operand_subword (y, i, 1, mode);
2817 else if (ypart == 0)
2818 ypart = operand_subword_force (y, i, mode);
2820 if (xpart == 0 || ypart == 0)
2821 abort ();
2823 need_clobber |= (GET_CODE (xpart) == SUBREG);
2825 last_insn = emit_move_insn (xpart, ypart);
2828 seq = gen_sequence ();
2829 end_sequence ();
2831 /* Show the output dies here. This is necessary for SUBREGs
2832 of pseudos since we cannot track their lifetimes correctly;
2833 hard regs shouldn't appear here except as return values.
2834 We never want to emit such a clobber after reload. */
2835 if (x != y
2836 && ! (reload_in_progress || reload_completed)
2837 && need_clobber != 0)
2839 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2842 emit_insn (seq);
2844 return last_insn;
2846 else
2847 abort ();
2850 /* Pushing data onto the stack. */
2852 /* Push a block of length SIZE (perhaps variable)
2853 and return an rtx to address the beginning of the block.
2854 Note that it is not possible for the value returned to be a QUEUED.
2855 The value may be virtual_outgoing_args_rtx.
2857 EXTRA is the number of bytes of padding to push in addition to SIZE.
2858 BELOW nonzero means this padding comes at low addresses;
2859 otherwise, the padding comes at high addresses. */
2861 rtx
2862 push_block (size, extra, below)
2863 rtx size;
2864 int extra, below;
2866 register rtx temp;
2868 size = convert_modes (Pmode, ptr_mode, size, 1);
2869 if (CONSTANT_P (size))
2870 anti_adjust_stack (plus_constant (size, extra));
2871 else if (GET_CODE (size) == REG && extra == 0)
2872 anti_adjust_stack (size);
2873 else
2875 temp = copy_to_mode_reg (Pmode, size);
2876 if (extra != 0)
2877 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2878 temp, 0, OPTAB_LIB_WIDEN);
2879 anti_adjust_stack (temp);
2882 #ifndef STACK_GROWS_DOWNWARD
2883 #ifdef ARGS_GROW_DOWNWARD
2884 if (!ACCUMULATE_OUTGOING_ARGS)
2885 #else
2886 if (0)
2887 #endif
2888 #else
2889 if (1)
2890 #endif
2892 /* Return the lowest stack address when STACK or ARGS grow downward and
2893 we are not accumulating outgoing arguments (the c4x port uses such
2894 conventions). */
2895 temp = virtual_outgoing_args_rtx;
2896 if (extra != 0 && below)
2897 temp = plus_constant (temp, extra);
2899 else
2901 if (GET_CODE (size) == CONST_INT)
2902 temp = plus_constant (virtual_outgoing_args_rtx,
2903 -INTVAL (size) - (below ? 0 : extra));
2904 else if (extra != 0 && !below)
2905 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2906 negate_rtx (Pmode, plus_constant (size, extra)));
2907 else
2908 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2909 negate_rtx (Pmode, size));
2912 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2915 rtx
2916 gen_push_operand ()
2918 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2921 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2922 block of SIZE bytes. */
2924 static rtx
2925 get_push_address (size)
2926 int size;
2928 register rtx temp;
2930 if (STACK_PUSH_CODE == POST_DEC)
2931 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2932 else if (STACK_PUSH_CODE == POST_INC)
2933 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2934 else
2935 temp = stack_pointer_rtx;
2937 return copy_to_reg (temp);
2940 /* Generate code to push X onto the stack, assuming it has mode MODE and
2941 type TYPE.
2942 MODE is redundant except when X is a CONST_INT (since they don't
2943 carry mode info).
2944 SIZE is an rtx for the size of data to be copied (in bytes),
2945 needed only if X is BLKmode.
2947 ALIGN is maximum alignment we can assume.
2949 If PARTIAL and REG are both nonzero, then copy that many of the first
2950 words of X into registers starting with REG, and push the rest of X.
2951 The amount of space pushed is decreased by PARTIAL words,
2952 rounded *down* to a multiple of PARM_BOUNDARY.
2953 REG must be a hard register in this case.
2954 If REG is zero but PARTIAL is not, take all other actions for an
2955 argument partially in registers, but do not actually load any
2956 registers.
2958 EXTRA is the amount in bytes of extra space to leave next to this arg.
2959 This is ignored if an argument block has already been allocated.
2961 On a machine that lacks real push insns, ARGS_ADDR is the address of
2962 the bottom of the argument block for this call. We use indexing off there
2963 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2964 argument block has not been preallocated.
2966 ARGS_SO_FAR is the size of args previously pushed for this call.
2968 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2969 for arguments passed in registers. If nonzero, it will be the number
2970 of bytes required. */
2972 void
2973 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2974 args_addr, args_so_far, reg_parm_stack_space,
2975 alignment_pad)
2976 register rtx x;
2977 enum machine_mode mode;
2978 tree type;
2979 rtx size;
2980 unsigned int align;
2981 int partial;
2982 rtx reg;
2983 int extra;
2984 rtx args_addr;
2985 rtx args_so_far;
2986 int reg_parm_stack_space;
2987 rtx alignment_pad;
2989 rtx xinner;
2990 enum direction stack_direction
2991 #ifdef STACK_GROWS_DOWNWARD
2992 = downward;
2993 #else
2994 = upward;
2995 #endif
2997 /* Decide where to pad the argument: `downward' for below,
2998 `upward' for above, or `none' for don't pad it.
2999 Default is below for small data on big-endian machines; else above. */
3000 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3002 /* Invert direction if stack is post-update. */
3003 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3004 if (where_pad != none)
3005 where_pad = (where_pad == downward ? upward : downward);
3007 xinner = x = protect_from_queue (x, 0);
3009 if (mode == BLKmode)
3011 /* Copy a block into the stack, entirely or partially. */
3013 register rtx temp;
3014 int used = partial * UNITS_PER_WORD;
3015 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3016 int skip;
3018 if (size == 0)
3019 abort ();
3021 used -= offset;
3023 /* USED is now the # of bytes we need not copy to the stack
3024 because registers will take care of them. */
3026 if (partial != 0)
3027 xinner = change_address (xinner, BLKmode,
3028 plus_constant (XEXP (xinner, 0), used));
3030 /* If the partial register-part of the arg counts in its stack size,
3031 skip the part of stack space corresponding to the registers.
3032 Otherwise, start copying to the beginning of the stack space,
3033 by setting SKIP to 0. */
3034 skip = (reg_parm_stack_space == 0) ? 0 : used;
3036 #ifdef PUSH_ROUNDING
3037 /* Do it with several push insns if that doesn't take lots of insns
3038 and if there is no difficulty with push insns that skip bytes
3039 on the stack for alignment purposes. */
3040 if (args_addr == 0
3041 && PUSH_ARGS
3042 && GET_CODE (size) == CONST_INT
3043 && skip == 0
3044 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3045 /* Here we avoid the case of a structure whose weak alignment
3046 forces many pushes of a small amount of data,
3047 and such small pushes do rounding that causes trouble. */
3048 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3049 || align >= BIGGEST_ALIGNMENT
3050 || PUSH_ROUNDING (align) == align)
3051 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3053 /* Push padding now if padding above and stack grows down,
3054 or if padding below and stack grows up.
3055 But if space already allocated, this has already been done. */
3056 if (extra && args_addr == 0
3057 && where_pad != none && where_pad != stack_direction)
3058 anti_adjust_stack (GEN_INT (extra));
3060 stack_pointer_delta += INTVAL (size) - used;
3061 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3062 INTVAL (size) - used, align);
3064 if (current_function_check_memory_usage && ! in_check_memory_usage)
3066 rtx temp;
3068 in_check_memory_usage = 1;
3069 temp = get_push_address (INTVAL (size) - used);
3070 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3071 emit_library_call (chkr_copy_bitmap_libfunc,
3072 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3073 Pmode, XEXP (xinner, 0), Pmode,
3074 GEN_INT (INTVAL (size) - used),
3075 TYPE_MODE (sizetype));
3076 else
3077 emit_library_call (chkr_set_right_libfunc,
3078 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3079 Pmode, GEN_INT (INTVAL (size) - used),
3080 TYPE_MODE (sizetype),
3081 GEN_INT (MEMORY_USE_RW),
3082 TYPE_MODE (integer_type_node));
3083 in_check_memory_usage = 0;
3086 else
3087 #endif /* PUSH_ROUNDING */
3089 rtx target;
3091 /* Otherwise make space on the stack and copy the data
3092 to the address of that space. */
3094 /* Deduct words put into registers from the size we must copy. */
3095 if (partial != 0)
3097 if (GET_CODE (size) == CONST_INT)
3098 size = GEN_INT (INTVAL (size) - used);
3099 else
3100 size = expand_binop (GET_MODE (size), sub_optab, size,
3101 GEN_INT (used), NULL_RTX, 0,
3102 OPTAB_LIB_WIDEN);
3105 /* Get the address of the stack space.
3106 In this case, we do not deal with EXTRA separately.
3107 A single stack adjust will do. */
3108 if (! args_addr)
3110 temp = push_block (size, extra, where_pad == downward);
3111 extra = 0;
3113 else if (GET_CODE (args_so_far) == CONST_INT)
3114 temp = memory_address (BLKmode,
3115 plus_constant (args_addr,
3116 skip + INTVAL (args_so_far)));
3117 else
3118 temp = memory_address (BLKmode,
3119 plus_constant (gen_rtx_PLUS (Pmode,
3120 args_addr,
3121 args_so_far),
3122 skip));
3123 if (current_function_check_memory_usage && ! in_check_memory_usage)
3125 in_check_memory_usage = 1;
3126 target = copy_to_reg (temp);
3127 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3128 emit_library_call (chkr_copy_bitmap_libfunc,
3129 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3130 target, Pmode,
3131 XEXP (xinner, 0), Pmode,
3132 size, TYPE_MODE (sizetype));
3133 else
3134 emit_library_call (chkr_set_right_libfunc,
3135 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3136 target, Pmode,
3137 size, TYPE_MODE (sizetype),
3138 GEN_INT (MEMORY_USE_RW),
3139 TYPE_MODE (integer_type_node));
3140 in_check_memory_usage = 0;
3143 target = gen_rtx_MEM (BLKmode, temp);
3145 if (type != 0)
3147 set_mem_attributes (target, type, 1);
3148 /* Function incoming arguments may overlap with sibling call
3149 outgoing arguments and we cannot allow reordering of reads
3150 from function arguments with stores to outgoing arguments
3151 of sibling calls. */
3152 MEM_ALIAS_SET (target) = 0;
3155 /* TEMP is the address of the block. Copy the data there. */
3156 if (GET_CODE (size) == CONST_INT
3157 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3159 move_by_pieces (target, xinner, INTVAL (size), align);
3160 goto ret;
3162 else
3164 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3165 enum machine_mode mode;
3167 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3168 mode != VOIDmode;
3169 mode = GET_MODE_WIDER_MODE (mode))
3171 enum insn_code code = movstr_optab[(int) mode];
3172 insn_operand_predicate_fn pred;
3174 if (code != CODE_FOR_nothing
3175 && ((GET_CODE (size) == CONST_INT
3176 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3177 <= (GET_MODE_MASK (mode) >> 1)))
3178 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3179 && (!(pred = insn_data[(int) code].operand[0].predicate)
3180 || ((*pred) (target, BLKmode)))
3181 && (!(pred = insn_data[(int) code].operand[1].predicate)
3182 || ((*pred) (xinner, BLKmode)))
3183 && (!(pred = insn_data[(int) code].operand[3].predicate)
3184 || ((*pred) (opalign, VOIDmode))))
3186 rtx op2 = convert_to_mode (mode, size, 1);
3187 rtx last = get_last_insn ();
3188 rtx pat;
3190 pred = insn_data[(int) code].operand[2].predicate;
3191 if (pred != 0 && ! (*pred) (op2, mode))
3192 op2 = copy_to_mode_reg (mode, op2);
3194 pat = GEN_FCN ((int) code) (target, xinner,
3195 op2, opalign);
3196 if (pat)
3198 emit_insn (pat);
3199 goto ret;
3201 else
3202 delete_insns_since (last);
3207 if (!ACCUMULATE_OUTGOING_ARGS)
3209 /* If the source is referenced relative to the stack pointer,
3210 copy it to another register to stabilize it. We do not need
3211 to do this if we know that we won't be changing sp. */
3213 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3214 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3215 temp = copy_to_reg (temp);
3218 /* Make inhibit_defer_pop nonzero around the library call
3219 to force it to pop the bcopy-arguments right away. */
3220 NO_DEFER_POP;
3221 #ifdef TARGET_MEM_FUNCTIONS
3222 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3223 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3224 convert_to_mode (TYPE_MODE (sizetype),
3225 size, TREE_UNSIGNED (sizetype)),
3226 TYPE_MODE (sizetype));
3227 #else
3228 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3229 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3230 convert_to_mode (TYPE_MODE (integer_type_node),
3231 size,
3232 TREE_UNSIGNED (integer_type_node)),
3233 TYPE_MODE (integer_type_node));
3234 #endif
3235 OK_DEFER_POP;
3238 else if (partial > 0)
3240 /* Scalar partly in registers. */
3242 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3243 int i;
3244 int not_stack;
3245 /* # words of start of argument
3246 that we must make space for but need not store. */
3247 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3248 int args_offset = INTVAL (args_so_far);
3249 int skip;
3251 /* Push padding now if padding above and stack grows down,
3252 or if padding below and stack grows up.
3253 But if space already allocated, this has already been done. */
3254 if (extra && args_addr == 0
3255 && where_pad != none && where_pad != stack_direction)
3256 anti_adjust_stack (GEN_INT (extra));
3258 /* If we make space by pushing it, we might as well push
3259 the real data. Otherwise, we can leave OFFSET nonzero
3260 and leave the space uninitialized. */
3261 if (args_addr == 0)
3262 offset = 0;
3264 /* Now NOT_STACK gets the number of words that we don't need to
3265 allocate on the stack. */
3266 not_stack = partial - offset;
3268 /* If the partial register-part of the arg counts in its stack size,
3269 skip the part of stack space corresponding to the registers.
3270 Otherwise, start copying to the beginning of the stack space,
3271 by setting SKIP to 0. */
3272 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3274 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3275 x = validize_mem (force_const_mem (mode, x));
3277 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3278 SUBREGs of such registers are not allowed. */
3279 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3280 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3281 x = copy_to_reg (x);
3283 /* Loop over all the words allocated on the stack for this arg. */
3284 /* We can do it by words, because any scalar bigger than a word
3285 has a size a multiple of a word. */
3286 #ifndef PUSH_ARGS_REVERSED
3287 for (i = not_stack; i < size; i++)
3288 #else
3289 for (i = size - 1; i >= not_stack; i--)
3290 #endif
3291 if (i >= not_stack + offset)
3292 emit_push_insn (operand_subword_force (x, i, mode),
3293 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3294 0, args_addr,
3295 GEN_INT (args_offset + ((i - not_stack + skip)
3296 * UNITS_PER_WORD)),
3297 reg_parm_stack_space, alignment_pad);
3299 else
3301 rtx addr;
3302 rtx target = NULL_RTX;
3303 rtx dest;
3305 /* Push padding now if padding above and stack grows down,
3306 or if padding below and stack grows up.
3307 But if space already allocated, this has already been done. */
3308 if (extra && args_addr == 0
3309 && where_pad != none && where_pad != stack_direction)
3310 anti_adjust_stack (GEN_INT (extra));
3312 #ifdef PUSH_ROUNDING
3313 if (args_addr == 0 && PUSH_ARGS)
3315 addr = gen_push_operand ();
3316 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3318 else
3319 #endif
3321 if (GET_CODE (args_so_far) == CONST_INT)
3322 addr
3323 = memory_address (mode,
3324 plus_constant (args_addr,
3325 INTVAL (args_so_far)));
3326 else
3327 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3328 args_so_far));
3329 target = addr;
3332 dest = gen_rtx_MEM (mode, addr);
3333 if (type != 0)
3335 set_mem_attributes (dest, type, 1);
3336 /* Function incoming arguments may overlap with sibling call
3337 outgoing arguments and we cannot allow reordering of reads
3338 from function arguments with stores to outgoing arguments
3339 of sibling calls. */
3340 MEM_ALIAS_SET (dest) = 0;
3343 emit_move_insn (dest, x);
3345 if (current_function_check_memory_usage && ! in_check_memory_usage)
3347 in_check_memory_usage = 1;
3348 if (target == 0)
3349 target = get_push_address (GET_MODE_SIZE (mode));
3351 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3352 emit_library_call (chkr_copy_bitmap_libfunc,
3353 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3354 Pmode, XEXP (x, 0), Pmode,
3355 GEN_INT (GET_MODE_SIZE (mode)),
3356 TYPE_MODE (sizetype));
3357 else
3358 emit_library_call (chkr_set_right_libfunc,
3359 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3360 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3361 TYPE_MODE (sizetype),
3362 GEN_INT (MEMORY_USE_RW),
3363 TYPE_MODE (integer_type_node));
3364 in_check_memory_usage = 0;
3368 ret:
3369 /* If part should go in registers, copy that part
3370 into the appropriate registers. Do this now, at the end,
3371 since mem-to-mem copies above may do function calls. */
3372 if (partial > 0 && reg != 0)
3374 /* Handle calls that pass values in multiple non-contiguous locations.
3375 The Irix 6 ABI has examples of this. */
3376 if (GET_CODE (reg) == PARALLEL)
3377 emit_group_load (reg, x, -1, align); /* ??? size? */
3378 else
3379 move_block_to_reg (REGNO (reg), x, partial, mode);
3382 if (extra && args_addr == 0 && where_pad == stack_direction)
3383 anti_adjust_stack (GEN_INT (extra));
3385 if (alignment_pad && args_addr == 0)
3386 anti_adjust_stack (alignment_pad);
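/* Illustrative sketch, not compiled (#if 0): pushing a single SImode
   argument on a machine with push insns, i.e. with ARGS_ADDR == 0 and
   no preallocated argument block.  No padding, no partial registers
   and no register-parm stack space are assumed here.  */
#if 0
static void
example_push_int_arg (val)
     rtx val;
{
  emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                  PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                  const0_rtx, 0, NULL_RTX);
}
#endif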
3389 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3390 operations. */
3392 static rtx
3393 get_subtarget (x)
3394 rtx x;
3396 return ((x == 0
3397 /* Only registers can be subtargets. */
3398 || GET_CODE (x) != REG
3399 /* If the register is readonly, it can't be set more than once. */
3400 || RTX_UNCHANGING_P (x)
3401 /* Don't use hard regs to avoid extending their life. */
3402 || REGNO (x) < FIRST_PSEUDO_REGISTER
3403 /* Avoid subtargets inside loops,
3404 since they hide some invariant expressions. */
3405 || preserve_subexpressions_p ())
3406 ? 0 : x);
3409 /* Expand an assignment that stores the value of FROM into TO.
3410 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3411 (This may contain a QUEUED rtx;
3412 if the value is constant, this rtx is a constant.)
3413 Otherwise, the returned value is NULL_RTX.
3415 SUGGEST_REG is no longer actually used.
3416 It used to mean, copy the value through a register
3417 and return that register, if that is possible.
3418 We now use WANT_VALUE to decide whether to do this. */
3420 rtx
3421 expand_assignment (to, from, want_value, suggest_reg)
3422 tree to, from;
3423 int want_value;
3424 int suggest_reg ATTRIBUTE_UNUSED;
3426 register rtx to_rtx = 0;
3427 rtx result;
3429 /* Don't crash if the lhs of the assignment was erroneous. */
3431 if (TREE_CODE (to) == ERROR_MARK)
3433 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3434 return want_value ? result : NULL_RTX;
3437 /* Assignment of a structure component needs special treatment
3438 if the structure component's rtx is not simply a MEM.
3439 Assignment of an array element at a constant index, and assignment of
3440 an array element in an unaligned packed structure field, has the same
3441 problem. */
3443 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3444 || TREE_CODE (to) == ARRAY_REF)
3446 enum machine_mode mode1;
3447 HOST_WIDE_INT bitsize, bitpos;
3448 tree offset;
3449 int unsignedp;
3450 int volatilep = 0;
3451 tree tem;
3452 unsigned int alignment;
3454 push_temp_slots ();
3455 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3456 &unsignedp, &volatilep, &alignment);
3458 /* If we are going to use store_bit_field and extract_bit_field,
3459 make sure to_rtx will be safe for multiple use. */
3461 if (mode1 == VOIDmode && want_value)
3462 tem = stabilize_reference (tem);
3464 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3465 if (offset != 0)
3467 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3469 if (GET_CODE (to_rtx) != MEM)
3470 abort ();
3472 if (GET_MODE (offset_rtx) != ptr_mode)
3474 #ifdef POINTERS_EXTEND_UNSIGNED
3475 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3476 #else
3477 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3478 #endif
3481 /* A constant address in TO_RTX can have VOIDmode; we must not try
3482 to call force_reg for that case. Avoid that case. */
3483 if (GET_CODE (to_rtx) == MEM
3484 && GET_MODE (to_rtx) == BLKmode
3485 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3486 && bitsize
3487 && (bitpos % bitsize) == 0
3488 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3489 && alignment == GET_MODE_ALIGNMENT (mode1))
3491 rtx temp = change_address (to_rtx, mode1,
3492 plus_constant (XEXP (to_rtx, 0),
3493 (bitpos /
3494 BITS_PER_UNIT)));
3495 if (GET_CODE (XEXP (temp, 0)) == REG)
3496 to_rtx = temp;
3497 else
3498 to_rtx = change_address (to_rtx, mode1,
3499 force_reg (GET_MODE (XEXP (temp, 0)),
3500 XEXP (temp, 0)));
3501 bitpos = 0;
3504 to_rtx = change_address (to_rtx, VOIDmode,
3505 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3506 force_reg (ptr_mode,
3507 offset_rtx)));
3510 if (volatilep)
3512 if (GET_CODE (to_rtx) == MEM)
3514 /* When the offset is zero, to_rtx is the address of the
3515 structure we are storing into, and hence may be shared.
3516 We must make a new MEM before setting the volatile bit. */
3517 if (offset == 0)
3518 to_rtx = copy_rtx (to_rtx);
3520 MEM_VOLATILE_P (to_rtx) = 1;
3522 #if 0 /* This was turned off because, when a field is volatile
3523 in an object which is not volatile, the object may be in a register,
3524 and then we would abort over here. */
3525 else
3526 abort ();
3527 #endif
3530 if (TREE_CODE (to) == COMPONENT_REF
3531 && TREE_READONLY (TREE_OPERAND (to, 1)))
3533 if (offset == 0)
3534 to_rtx = copy_rtx (to_rtx);
3536 RTX_UNCHANGING_P (to_rtx) = 1;
3539 /* Check the access. */
3540 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3542 rtx to_addr;
3543 int size;
3544 int best_mode_size;
3545 enum machine_mode best_mode;
3547 best_mode = get_best_mode (bitsize, bitpos,
3548 TYPE_ALIGN (TREE_TYPE (tem)),
3549 mode1, volatilep);
3550 if (best_mode == VOIDmode)
3551 best_mode = QImode;
3553 best_mode_size = GET_MODE_BITSIZE (best_mode);
3554 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3555 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3556 size *= GET_MODE_SIZE (best_mode);
3558 /* Check the access right of the pointer. */
3559 in_check_memory_usage = 1;
3560 if (size)
3561 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3562 VOIDmode, 3, to_addr, Pmode,
3563 GEN_INT (size), TYPE_MODE (sizetype),
3564 GEN_INT (MEMORY_USE_WO),
3565 TYPE_MODE (integer_type_node));
3566 in_check_memory_usage = 0;
3569 /* If this is a varying-length object, we must get the address of
3570 the source and do an explicit block move. */
3571 if (bitsize < 0)
3573 unsigned int from_align;
3574 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3575 rtx inner_to_rtx
3576 = change_address (to_rtx, VOIDmode,
3577 plus_constant (XEXP (to_rtx, 0),
3578 bitpos / BITS_PER_UNIT));
3580 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3581 MIN (alignment, from_align));
3582 free_temp_slots ();
3583 pop_temp_slots ();
3584 return to_rtx;
3586 else
3588 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3589 (want_value
3590 /* Spurious cast for HPUX compiler. */
3591 ? ((enum machine_mode)
3592 TYPE_MODE (TREE_TYPE (to)))
3593 : VOIDmode),
3594 unsignedp,
3595 alignment,
3596 int_size_in_bytes (TREE_TYPE (tem)),
3597 get_alias_set (to));
3599 preserve_temp_slots (result);
3600 free_temp_slots ();
3601 pop_temp_slots ();
3603 /* If the value is meaningful, convert RESULT to the proper mode.
3604 Otherwise, return nothing. */
3605 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3606 TYPE_MODE (TREE_TYPE (from)),
3607 result,
3608 TREE_UNSIGNED (TREE_TYPE (to)))
3609 : NULL_RTX);
3613 /* If the rhs is a function call and its value is not an aggregate,
3614 call the function before we start to compute the lhs.
3615 This is needed for correct code for cases such as
3616 val = setjmp (buf) on machines where reference to val
3617 requires loading up part of an address in a separate insn.
3619 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3620 since it might be a promoted variable where the zero- or sign- extension
3621 needs to be done. Handling this in the normal way is safe because no
3622 computation is done before the call. */
3623 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3624 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3625 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3626 && GET_CODE (DECL_RTL (to)) == REG))
3628 rtx value;
3630 push_temp_slots ();
3631 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3632 if (to_rtx == 0)
3633 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3635 /* Handle calls that return values in multiple non-contiguous locations.
3636 The Irix 6 ABI has examples of this. */
3637 if (GET_CODE (to_rtx) == PARALLEL)
3638 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3639 TYPE_ALIGN (TREE_TYPE (from)));
3640 else if (GET_MODE (to_rtx) == BLKmode)
3641 emit_block_move (to_rtx, value, expr_size (from),
3642 TYPE_ALIGN (TREE_TYPE (from)));
3643 else
3645 #ifdef POINTERS_EXTEND_UNSIGNED
3646 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3647 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3648 value = convert_memory_address (GET_MODE (to_rtx), value);
3649 #endif
3650 emit_move_insn (to_rtx, value);
3652 preserve_temp_slots (to_rtx);
3653 free_temp_slots ();
3654 pop_temp_slots ();
3655 return want_value ? to_rtx : NULL_RTX;
3658 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3659 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3661 if (to_rtx == 0)
3663 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3664 if (GET_CODE (to_rtx) == MEM)
3665 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3668 /* Don't move directly into a return register. */
3669 if (TREE_CODE (to) == RESULT_DECL
3670 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3672 rtx temp;
3674 push_temp_slots ();
3675 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3677 if (GET_CODE (to_rtx) == PARALLEL)
3678 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3679 TYPE_ALIGN (TREE_TYPE (from)));
3680 else
3681 emit_move_insn (to_rtx, temp);
3683 preserve_temp_slots (to_rtx);
3684 free_temp_slots ();
3685 pop_temp_slots ();
3686 return want_value ? to_rtx : NULL_RTX;
3689 /* In case we are returning the contents of an object which overlaps
3690 the place the value is being stored, use a safe function when copying
3691 a value through a pointer into a structure value return block. */
3692 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3693 && current_function_returns_struct
3694 && !current_function_returns_pcc_struct)
3696 rtx from_rtx, size;
3698 push_temp_slots ();
3699 size = expr_size (from);
3700 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3701 EXPAND_MEMORY_USE_DONT);
3703 /* Copy the rights of the bitmap. */
3704 if (current_function_check_memory_usage)
3705 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3706 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3707 XEXP (from_rtx, 0), Pmode,
3708 convert_to_mode (TYPE_MODE (sizetype),
3709 size, TREE_UNSIGNED (sizetype)),
3710 TYPE_MODE (sizetype));
3712 #ifdef TARGET_MEM_FUNCTIONS
3713 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3714 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3715 XEXP (from_rtx, 0), Pmode,
3716 convert_to_mode (TYPE_MODE (sizetype),
3717 size, TREE_UNSIGNED (sizetype)),
3718 TYPE_MODE (sizetype));
3719 #else
3720 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3721 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3722 XEXP (to_rtx, 0), Pmode,
3723 convert_to_mode (TYPE_MODE (integer_type_node),
3724 size, TREE_UNSIGNED (integer_type_node)),
3725 TYPE_MODE (integer_type_node));
3726 #endif
3728 preserve_temp_slots (to_rtx);
3729 free_temp_slots ();
3730 pop_temp_slots ();
3731 return want_value ? to_rtx : NULL_RTX;
3734 /* Compute FROM and store the value in the rtx we got. */
3736 push_temp_slots ();
3737 result = store_expr (from, to_rtx, want_value);
3738 preserve_temp_slots (result);
3739 free_temp_slots ();
3740 pop_temp_slots ();
3741 return want_value ? result : NULL_RTX;
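/* Illustrative sketch, not compiled (#if 0): how a MODIFY_EXPR tree
   whose value is not needed could be handed to expand_assignment;
   STMT is a hypothetical MODIFY_EXPR supplied by a front end.  */
#if 0
static void
example_expand_modify (stmt)
     tree stmt;
{
  expand_assignment (TREE_OPERAND (stmt, 0), TREE_OPERAND (stmt, 1), 0, 0);
}
#endif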
3744 /* Generate code for computing expression EXP,
3745 and storing the value into TARGET.
3746 TARGET may contain a QUEUED rtx.
3748 If WANT_VALUE is nonzero, return a copy of the value
3749 not in TARGET, so that we can be sure to use the proper
3750 value in a containing expression even if TARGET has something
3751 else stored in it. If possible, we copy the value through a pseudo
3752 and return that pseudo. Or, if the value is constant, we try to
3753 return the constant. In some cases, we return a pseudo
3754 copied *from* TARGET.
3756 If the mode is BLKmode then we may return TARGET itself.
3757 It turns out that in BLKmode it doesn't cause a problem,
3758 because C has no operators that could combine two different
3759 assignments into the same BLKmode object with different values
3760 with no sequence point. Will other languages need this to
3761 be more thorough?
3763 If WANT_VALUE is 0, we return NULL, to make sure
3764 to catch quickly any cases where the caller uses the value
3765 and fails to set WANT_VALUE. */
3767 rtx
3768 store_expr (exp, target, want_value)
3769 register tree exp;
3770 register rtx target;
3771 int want_value;
3773 register rtx temp;
3774 int dont_return_target = 0;
3776 if (TREE_CODE (exp) == COMPOUND_EXPR)
3778 /* Perform first part of compound expression, then assign from second
3779 part. */
3780 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3781 emit_queue ();
3782 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3784 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3786 /* For conditional expression, get safe form of the target. Then
3787 test the condition, doing the appropriate assignment on either
3788 side. This avoids the creation of unnecessary temporaries.
3789 For non-BLKmode, it is more efficient not to do this. */
3791 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3793 emit_queue ();
3794 target = protect_from_queue (target, 1);
3796 do_pending_stack_adjust ();
3797 NO_DEFER_POP;
3798 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3799 start_cleanup_deferral ();
3800 store_expr (TREE_OPERAND (exp, 1), target, 0);
3801 end_cleanup_deferral ();
3802 emit_queue ();
3803 emit_jump_insn (gen_jump (lab2));
3804 emit_barrier ();
3805 emit_label (lab1);
3806 start_cleanup_deferral ();
3807 store_expr (TREE_OPERAND (exp, 2), target, 0);
3808 end_cleanup_deferral ();
3809 emit_queue ();
3810 emit_label (lab2);
3811 OK_DEFER_POP;
3813 return want_value ? target : NULL_RTX;
3815 else if (queued_subexp_p (target))
3816 /* If target contains a postincrement, let's not risk
3817 using it as the place to generate the rhs. */
3819 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3821 /* Expand EXP into a new pseudo. */
3822 temp = gen_reg_rtx (GET_MODE (target));
3823 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3825 else
3826 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3828 /* If target is volatile, ANSI requires accessing the value
3829 *from* the target, if it is accessed. So make that happen.
3830 In no case return the target itself. */
3831 if (! MEM_VOLATILE_P (target) && want_value)
3832 dont_return_target = 1;
3834 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3835 && GET_MODE (target) != BLKmode)
3836 /* If target is in memory and caller wants value in a register instead,
3837 arrange that. Pass TARGET as target for expand_expr so that,
3838 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3839 We know expand_expr will not use the target in that case.
3840 Don't do this if TARGET is volatile because we are supposed
3841 to write it and then read it. */
3843 temp = expand_expr (exp, target, GET_MODE (target), 0);
3844 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3845 temp = copy_to_reg (temp);
3846 dont_return_target = 1;
3848 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3849    /* If this is a scalar in a register that is stored in a wider mode
3850 than the declared mode, compute the result into its declared mode
3851 and then convert to the wider mode. Our value is the computed
3852 expression. */
3854 /* If we don't want a value, we can do the conversion inside EXP,
3855 which will often result in some optimizations. Do the conversion
3856 in two steps: first change the signedness, if needed, then
3857 the extend. But don't do this if the type of EXP is a subtype
3858 of something else since then the conversion might involve
3859 more than just converting modes. */
3860 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3861 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3863 if (TREE_UNSIGNED (TREE_TYPE (exp))
3864 != SUBREG_PROMOTED_UNSIGNED_P (target))
3866            exp = convert
3867 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3868 TREE_TYPE (exp)),
3869 exp);
3871 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3872 SUBREG_PROMOTED_UNSIGNED_P (target)),
3873 exp);
3876 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3878 /* If TEMP is a volatile MEM and we want a result value, make
3879 the access now so it gets done only once. Likewise if
3880 it contains TARGET. */
3881 if (GET_CODE (temp) == MEM && want_value
3882 && (MEM_VOLATILE_P (temp)
3883 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3884 temp = copy_to_reg (temp);
3886 /* If TEMP is a VOIDmode constant, use convert_modes to make
3887 sure that we properly convert it. */
3888 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3889 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3890 TYPE_MODE (TREE_TYPE (exp)), temp,
3891 SUBREG_PROMOTED_UNSIGNED_P (target));
3893 convert_move (SUBREG_REG (target), temp,
3894 SUBREG_PROMOTED_UNSIGNED_P (target));
3896 /* If we promoted a constant, change the mode back down to match
3897 target. Otherwise, the caller might get confused by a result whose
3898 mode is larger than expected. */
3900 if (want_value && GET_MODE (temp) != GET_MODE (target)
3901 && GET_MODE (temp) != VOIDmode)
3903 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3904 SUBREG_PROMOTED_VAR_P (temp) = 1;
3905 SUBREG_PROMOTED_UNSIGNED_P (temp)
3906 = SUBREG_PROMOTED_UNSIGNED_P (target);
3909 return want_value ? temp : NULL_RTX;
3911 else
3913 temp = expand_expr (exp, target, GET_MODE (target), 0);
3914 /* Return TARGET if it's a specified hardware register.
3915 If TARGET is a volatile mem ref, either return TARGET
3916 or return a reg copied *from* TARGET; ANSI requires this.
3918 Otherwise, if TEMP is not TARGET, return TEMP
3919 if it is constant (for efficiency),
3920 or if we really want the correct value. */
3921 if (!(target && GET_CODE (target) == REG
3922 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3923 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3924 && ! rtx_equal_p (temp, target)
3925 && (CONSTANT_P (temp) || want_value))
3926 dont_return_target = 1;
3929 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3930 the same as that of TARGET, adjust the constant. This is needed, for
3931 example, in case it is a CONST_DOUBLE and we want only a word-sized
3932 value. */
3933 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3934 && TREE_CODE (exp) != ERROR_MARK
3935 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3936 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3937 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3939 if (current_function_check_memory_usage
3940 && GET_CODE (target) == MEM
3941 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3943 in_check_memory_usage = 1;
3944 if (GET_CODE (temp) == MEM)
3945 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3946 VOIDmode, 3, XEXP (target, 0), Pmode,
3947 XEXP (temp, 0), Pmode,
3948 expr_size (exp), TYPE_MODE (sizetype));
3949 else
3950 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3951 VOIDmode, 3, XEXP (target, 0), Pmode,
3952 expr_size (exp), TYPE_MODE (sizetype),
3953 GEN_INT (MEMORY_USE_WO),
3954 TYPE_MODE (integer_type_node));
3955 in_check_memory_usage = 0;
3958 /* If value was not generated in the target, store it there.
3959      Convert the value to TARGET's type first if necessary.  */
3960 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3961 one or both of them are volatile memory refs, we have to distinguish
3962 two cases:
3963 - expand_expr has used TARGET. In this case, we must not generate
3964 another copy. This can be detected by TARGET being equal according
3965 to == .
3966 - expand_expr has not used TARGET - that means that the source just
3967 happens to have the same RTX form. Since temp will have been created
3968 by expand_expr, it will compare unequal according to == .
3969 We must generate a copy in this case, to reach the correct number
3970 of volatile memory references. */
3972 if ((! rtx_equal_p (temp, target)
3973 || (temp != target && (side_effects_p (temp)
3974 || side_effects_p (target))))
3975 && TREE_CODE (exp) != ERROR_MARK)
3977 target = protect_from_queue (target, 1);
3978 if (GET_MODE (temp) != GET_MODE (target)
3979 && GET_MODE (temp) != VOIDmode)
3981 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3982 if (dont_return_target)
3984 /* In this case, we will return TEMP,
3985 so make sure it has the proper mode.
3986 But don't forget to store the value into TARGET. */
3987 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3988 emit_move_insn (target, temp);
3990 else
3991 convert_move (target, temp, unsignedp);
3994 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3996 /* Handle copying a string constant into an array.
3997 The string constant may be shorter than the array.
3998 So copy just the string's actual length, and clear the rest. */
3999 rtx size;
4000 rtx addr;
4002 /* Get the size of the data type of the string,
4003 which is actually the size of the target. */
4004 size = expr_size (exp);
4005 if (GET_CODE (size) == CONST_INT
4006 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4007 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4008 else
4010 /* Compute the size of the data to copy from the string. */
4011 tree copy_size
4012 = size_binop (MIN_EXPR,
4013 make_tree (sizetype, size),
4014 size_int (TREE_STRING_LENGTH (exp)));
4015 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4016 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4017 VOIDmode, 0);
4018 rtx label = 0;
4020 /* Copy that much. */
4021 emit_block_move (target, temp, copy_size_rtx,
4022 TYPE_ALIGN (TREE_TYPE (exp)));
4024 /* Figure out how much is left in TARGET that we have to clear.
4025 Do all calculations in ptr_mode. */
4027 addr = XEXP (target, 0);
4028 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4030 if (GET_CODE (copy_size_rtx) == CONST_INT)
4032 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4033 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4034 align = MIN (align, (BITS_PER_UNIT
4035 * (INTVAL (copy_size_rtx)
4036 & - INTVAL (copy_size_rtx))));
4038 else
4040 addr = force_reg (ptr_mode, addr);
4041 addr = expand_binop (ptr_mode, add_optab, addr,
4042 copy_size_rtx, NULL_RTX, 0,
4043 OPTAB_LIB_WIDEN);
4045 size = expand_binop (ptr_mode, sub_optab, size,
4046 copy_size_rtx, NULL_RTX, 0,
4047 OPTAB_LIB_WIDEN);
4049 align = BITS_PER_UNIT;
4050 label = gen_label_rtx ();
4051 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4052 GET_MODE (size), 0, 0, label);
4054 align = MIN (align, expr_align (copy_size));
4056 if (size != const0_rtx)
4058 rtx dest = gen_rtx_MEM (BLKmode, addr);
4060 MEM_COPY_ATTRIBUTES (dest, target);
4062 /* Be sure we can write on ADDR. */
4063 in_check_memory_usage = 1;
4064 if (current_function_check_memory_usage)
4065 emit_library_call (chkr_check_addr_libfunc,
4066 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4067 addr, Pmode,
4068 size, TYPE_MODE (sizetype),
4069 GEN_INT (MEMORY_USE_WO),
4070 TYPE_MODE (integer_type_node));
4071 in_check_memory_usage = 0;
4072 clear_storage (dest, size, align);
4075 if (label)
4076 emit_label (label);
4079 /* Handle calls that return values in multiple non-contiguous locations.
4080 The Irix 6 ABI has examples of this. */
4081 else if (GET_CODE (target) == PARALLEL)
4082 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4083 TYPE_ALIGN (TREE_TYPE (exp)));
4084 else if (GET_MODE (temp) == BLKmode)
4085 emit_block_move (target, temp, expr_size (exp),
4086 TYPE_ALIGN (TREE_TYPE (exp)));
4087 else
4088 emit_move_insn (target, temp);
4091 /* If we don't want a value, return NULL_RTX. */
4092 if (! want_value)
4093 return NULL_RTX;
4095 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4096 ??? The latter test doesn't seem to make sense. */
4097 else if (dont_return_target && GET_CODE (temp) != MEM)
4098 return temp;
4100 /* Return TARGET itself if it is a hard register. */
4101 else if (want_value && GET_MODE (target) != BLKmode
4102 && ! (GET_CODE (target) == REG
4103 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4104 return copy_to_reg (target);
4106 else
4107 return target;
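/* Illustrative example (not from the original sources): the STRING_CST
   case above implements C's rule for initializing a char array from a
   shorter string literal -- copy the literal, then zero the remainder
   of the object.  Roughly:  */
#if 0
char buf[8] = "abc";	/* copies 4 bytes ("abc" plus its terminating NUL),
			   then clears the trailing 4 bytes of BUF.  */
#endif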
4110 /* Return 1 if EXP just contains zeros. */
4112 static int
4113 is_zeros_p (exp)
4114 tree exp;
4116 tree elt;
4118 switch (TREE_CODE (exp))
4120 case CONVERT_EXPR:
4121 case NOP_EXPR:
4122 case NON_LVALUE_EXPR:
4123 return is_zeros_p (TREE_OPERAND (exp, 0));
4125 case INTEGER_CST:
4126 return integer_zerop (exp);
4128 case COMPLEX_CST:
4129 return
4130 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4132 case REAL_CST:
4133 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4135 case CONSTRUCTOR:
4136 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4137 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4138 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4139 if (! is_zeros_p (TREE_VALUE (elt)))
4140 return 0;
4142 return 1;
4144 default:
4145 return 0;
4149 /* Return 1 if EXP contains mostly (3/4) zeros. */
4151 static int
4152 mostly_zeros_p (exp)
4153 tree exp;
4155 if (TREE_CODE (exp) == CONSTRUCTOR)
4157 int elts = 0, zeros = 0;
4158 tree elt = CONSTRUCTOR_ELTS (exp);
4159 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4161 /* If there are no ranges of true bits, it is all zero. */
4162 return elt == NULL_TREE;
4164 for (; elt; elt = TREE_CHAIN (elt))
4166 /* We do not handle the case where the index is a RANGE_EXPR,
4167 so the statistic will be somewhat inaccurate.
4168 We do make a more accurate count in store_constructor itself,
4169             and since this function is only used for nested array elements,
4170 this should be close enough. */
4171 if (mostly_zeros_p (TREE_VALUE (elt)))
4172 zeros++;
4173 elts++;
4176 return 4 * zeros >= 3 * elts;
4179 return is_zeros_p (exp);
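/* Worked example (illustrative only): for a CONSTRUCTOR with 8 elements
   of which 6 are themselves mostly zero, the test 4 * 6 >= 3 * 8
   (24 >= 24) succeeds, so the whole constructor counts as mostly zeros;
   with only 5 such elements, 20 >= 24 fails and it does not.  */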
4182 /* Helper function for store_constructor.
4183 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4184 TYPE is the type of the CONSTRUCTOR, not the element type.
4185 ALIGN and CLEARED are as for store_constructor.
4186 ALIAS_SET is the alias set to use for any stores.
4188 This provides a recursive shortcut back to store_constructor when it isn't
4189 necessary to go through store_field. This is so that we can pass through
4190 the cleared field to let store_constructor know that we may not have to
4191 clear a substructure if the outer structure has already been cleared. */
4193 static void
4194 store_constructor_field (target, bitsize, bitpos,
4195 mode, exp, type, align, cleared, alias_set)
4196 rtx target;
4197 unsigned HOST_WIDE_INT bitsize;
4198 HOST_WIDE_INT bitpos;
4199 enum machine_mode mode;
4200 tree exp, type;
4201 unsigned int align;
4202 int cleared;
4203 int alias_set;
4205 if (TREE_CODE (exp) == CONSTRUCTOR
4206 && bitpos % BITS_PER_UNIT == 0
4207 /* If we have a non-zero bitpos for a register target, then we just
4208 let store_field do the bitfield handling. This is unlikely to
4210         generate unnecessary clear instructions anyway.  */
4210 && (bitpos == 0 || GET_CODE (target) == MEM))
4212 if (bitpos != 0)
4213 target
4214 = change_address (target,
4215 GET_MODE (target) == BLKmode
4216 || 0 != (bitpos
4217 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4218 ? BLKmode : VOIDmode,
4219 plus_constant (XEXP (target, 0),
4220 bitpos / BITS_PER_UNIT));
4222 MEM_ALIAS_SET (target) = alias_set;
4223 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4225 else
4226 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4227 int_size_in_bytes (type), alias_set);
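/* Illustrative example (a sketch, not part of the original sources):
   for a nested aggregate initializer such as the one below, the inner
   CONSTRUCTOR is handed straight back to store_constructor, so a clear
   of the whole outer object lets the recursive call skip clearing the
   substructure via the CLEARED argument.  */
#if 0
struct inner { int a, b; };
struct outer { struct inner x; int y; };
void f (void) { struct outer o = { { 1, 0 }, 0 }; }
#endif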
4230 /* Store the value of constructor EXP into the rtx TARGET.
4231 TARGET is either a REG or a MEM.
4232 ALIGN is the maximum known alignment for TARGET.
4233 CLEARED is true if TARGET is known to have been zero'd.
4234 SIZE is the number of bytes of TARGET we are allowed to modify: this
4235 may not be the same as the size of EXP if we are assigning to a field
4236 which has been packed to exclude padding bits. */
4238 static void
4239 store_constructor (exp, target, align, cleared, size)
4240 tree exp;
4241 rtx target;
4242 unsigned int align;
4243 int cleared;
4244 HOST_WIDE_INT size;
4246 tree type = TREE_TYPE (exp);
4247 #ifdef WORD_REGISTER_OPERATIONS
4248 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4249 #endif
4251 /* We know our target cannot conflict, since safe_from_p has been called. */
4252 #if 0
4253 /* Don't try copying piece by piece into a hard register
4254 since that is vulnerable to being clobbered by EXP.
4255 Instead, construct in a pseudo register and then copy it all. */
4256 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4258 rtx temp = gen_reg_rtx (GET_MODE (target));
4259 store_constructor (exp, temp, align, cleared, size);
4260 emit_move_insn (target, temp);
4261 return;
4263 #endif
4265 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4266 || TREE_CODE (type) == QUAL_UNION_TYPE)
4268 register tree elt;
4270 /* Inform later passes that the whole union value is dead. */
4271 if ((TREE_CODE (type) == UNION_TYPE
4272 || TREE_CODE (type) == QUAL_UNION_TYPE)
4273 && ! cleared)
4275 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4277 /* If the constructor is empty, clear the union. */
4278 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4279 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4282 /* If we are building a static constructor into a register,
4283 set the initial value as zero so we can fold the value into
4284 a constant. But if more than one register is involved,
4285 this probably loses. */
4286 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4287 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4289 if (! cleared)
4290 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4292 cleared = 1;
4295 /* If the constructor has fewer fields than the structure
4296 or if we are initializing the structure to mostly zeros,
4297      clear the whole structure first.  Don't do this if TARGET is a
4298 register whose mode size isn't equal to SIZE since clear_storage
4299 can't handle this case. */
4300 else if (size > 0
4301 && ((list_length (CONSTRUCTOR_ELTS (exp))
4302 != fields_length (type))
4303 || mostly_zeros_p (exp))
4304 && (GET_CODE (target) != REG
4305 || GET_MODE_SIZE (GET_MODE (target)) == size))
4307 if (! cleared)
4308 clear_storage (target, GEN_INT (size), align);
4310 cleared = 1;
4312 else if (! cleared)
4313 /* Inform later passes that the old value is dead. */
4314 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4316 /* Store each element of the constructor into
4317 the corresponding field of TARGET. */
4319 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4321 register tree field = TREE_PURPOSE (elt);
4322 #ifdef WORD_REGISTER_OPERATIONS
4323 tree value = TREE_VALUE (elt);
4324 #endif
4325 register enum machine_mode mode;
4326 HOST_WIDE_INT bitsize;
4327 HOST_WIDE_INT bitpos = 0;
4328 int unsignedp;
4329 tree offset;
4330 rtx to_rtx = target;
4332 /* Just ignore missing fields.
4333 We cleared the whole structure, above,
4334 if any fields are missing. */
4335 if (field == 0)
4336 continue;
4338 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4339 continue;
4341 if (host_integerp (DECL_SIZE (field), 1))
4342 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4343 else
4344 bitsize = -1;
4346 unsignedp = TREE_UNSIGNED (field);
4347 mode = DECL_MODE (field);
4348 if (DECL_BIT_FIELD (field))
4349 mode = VOIDmode;
4351 offset = DECL_FIELD_OFFSET (field);
4352 if (host_integerp (offset, 0)
4353 && host_integerp (bit_position (field), 0))
4355 bitpos = int_bit_position (field);
4356 offset = 0;
4358 else
4359 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4361 if (offset)
4363 rtx offset_rtx;
4365 if (contains_placeholder_p (offset))
4366 offset = build (WITH_RECORD_EXPR, sizetype,
4367 offset, make_tree (TREE_TYPE (exp), target));
4369 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4370 if (GET_CODE (to_rtx) != MEM)
4371 abort ();
4373 if (GET_MODE (offset_rtx) != ptr_mode)
4375 #ifdef POINTERS_EXTEND_UNSIGNED
4376 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4377 #else
4378 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4379 #endif
4382 to_rtx
4383 = change_address (to_rtx, VOIDmode,
4384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4385 force_reg (ptr_mode,
4386 offset_rtx)));
4387 align = DECL_OFFSET_ALIGN (field);
4390 if (TREE_READONLY (field))
4392 if (GET_CODE (to_rtx) == MEM)
4393 to_rtx = copy_rtx (to_rtx);
4395 RTX_UNCHANGING_P (to_rtx) = 1;
4398 #ifdef WORD_REGISTER_OPERATIONS
4399 /* If this initializes a field that is smaller than a word, at the
4400 start of a word, try to widen it to a full word.
4401 This special case allows us to output C++ member function
4402 initializations in a form that the optimizers can understand. */
4403 if (GET_CODE (target) == REG
4404 && bitsize < BITS_PER_WORD
4405 && bitpos % BITS_PER_WORD == 0
4406 && GET_MODE_CLASS (mode) == MODE_INT
4407 && TREE_CODE (value) == INTEGER_CST
4408 && exp_size >= 0
4409 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4411 tree type = TREE_TYPE (value);
4412 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4414 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4415 value = convert (type, value);
4417 if (BYTES_BIG_ENDIAN)
4418 value
4419 = fold (build (LSHIFT_EXPR, type, value,
4420 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4421 bitsize = BITS_PER_WORD;
4422 mode = word_mode;
4424 #endif
4425 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4426 TREE_VALUE (elt), type, align, cleared,
4427 DECL_NONADDRESSABLE_P (field)
4428 ? MEM_ALIAS_SET (to_rtx)
4429 : get_alias_set (TREE_TYPE (field)));
4432 else if (TREE_CODE (type) == ARRAY_TYPE)
4434 register tree elt;
4435 register int i;
4436 int need_to_clear;
4437 tree domain = TYPE_DOMAIN (type);
4438 tree elttype = TREE_TYPE (type);
4439 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4440 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4441 HOST_WIDE_INT minelt;
4442 HOST_WIDE_INT maxelt;
4444 /* If we have constant bounds for the range of the type, get them. */
4445 if (const_bounds_p)
4447 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4448 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4451 /* If the constructor has fewer elements than the array,
4452 clear the whole array first. Similarly if this is
4453         a static constructor of a non-BLKmode object.  */
4454 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4455 need_to_clear = 1;
4456 else
4458 HOST_WIDE_INT count = 0, zero_count = 0;
4459 need_to_clear = ! const_bounds_p;
4461 /* This loop is a more accurate version of the loop in
4462 mostly_zeros_p (it handles RANGE_EXPR in an index).
4463 It is also needed to check for missing elements. */
4464 for (elt = CONSTRUCTOR_ELTS (exp);
4465 elt != NULL_TREE && ! need_to_clear;
4466 elt = TREE_CHAIN (elt))
4468 tree index = TREE_PURPOSE (elt);
4469 HOST_WIDE_INT this_node_count;
4471 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4473 tree lo_index = TREE_OPERAND (index, 0);
4474 tree hi_index = TREE_OPERAND (index, 1);
4476 if (! host_integerp (lo_index, 1)
4477 || ! host_integerp (hi_index, 1))
4479 need_to_clear = 1;
4480 break;
4483 this_node_count = (tree_low_cst (hi_index, 1)
4484 - tree_low_cst (lo_index, 1) + 1);
4486 else
4487 this_node_count = 1;
4489 count += this_node_count;
4490 if (mostly_zeros_p (TREE_VALUE (elt)))
4491 zero_count += this_node_count;
4494 /* Clear the entire array first if there are any missing elements,
4495 or if the incidence of zero elements is >= 75%. */
4496 if (! need_to_clear
4497 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4498 need_to_clear = 1;
4501 if (need_to_clear && size > 0)
4503 if (! cleared)
4504 clear_storage (target, GEN_INT (size), align);
4505 cleared = 1;
4507 else
4508 /* Inform later passes that the old value is dead. */
4509 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4511 /* Store each element of the constructor into
4512 the corresponding element of TARGET, determined
4513 by counting the elements. */
4514 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4515 elt;
4516 elt = TREE_CHAIN (elt), i++)
4518 register enum machine_mode mode;
4519 HOST_WIDE_INT bitsize;
4520 HOST_WIDE_INT bitpos;
4521 int unsignedp;
4522 tree value = TREE_VALUE (elt);
4523 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4524 tree index = TREE_PURPOSE (elt);
4525 rtx xtarget = target;
4527 if (cleared && is_zeros_p (value))
4528 continue;
4530 unsignedp = TREE_UNSIGNED (elttype);
4531 mode = TYPE_MODE (elttype);
4532 if (mode == BLKmode)
4533 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4534 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4535 : -1);
4536 else
4537 bitsize = GET_MODE_BITSIZE (mode);
4539 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4541 tree lo_index = TREE_OPERAND (index, 0);
4542 tree hi_index = TREE_OPERAND (index, 1);
4543 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4544 struct nesting *loop;
4545 HOST_WIDE_INT lo, hi, count;
4546 tree position;
4548 /* If the range is constant and "small", unroll the loop. */
4549 if (const_bounds_p
4550 && host_integerp (lo_index, 0)
4551 && host_integerp (hi_index, 0)
4552 && (lo = tree_low_cst (lo_index, 0),
4553 hi = tree_low_cst (hi_index, 0),
4554 count = hi - lo + 1,
4555 (GET_CODE (target) != MEM
4556 || count <= 2
4557 || (host_integerp (TYPE_SIZE (elttype), 1)
4558 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4559 <= 40 * 8)))))
4561 lo -= minelt; hi -= minelt;
4562 for (; lo <= hi; lo++)
4564 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4565 store_constructor_field
4566 (target, bitsize, bitpos, mode, value, type, align,
4567 cleared,
4568 TYPE_NONALIASED_COMPONENT (type)
4569 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4572 else
4574 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4575 loop_top = gen_label_rtx ();
4576 loop_end = gen_label_rtx ();
4578 unsignedp = TREE_UNSIGNED (domain);
4580 index = build_decl (VAR_DECL, NULL_TREE, domain);
4582 DECL_RTL (index) = index_r
4583 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4584 &unsignedp, 0));
4586 if (TREE_CODE (value) == SAVE_EXPR
4587 && SAVE_EXPR_RTL (value) == 0)
4589 /* Make sure value gets expanded once before the
4590 loop. */
4591 expand_expr (value, const0_rtx, VOIDmode, 0);
4592 emit_queue ();
4594 store_expr (lo_index, index_r, 0);
4595 loop = expand_start_loop (0);
4597 /* Assign value to element index. */
4598 position
4599 = convert (ssizetype,
4600 fold (build (MINUS_EXPR, TREE_TYPE (index),
4601 index, TYPE_MIN_VALUE (domain))));
4602 position = size_binop (MULT_EXPR, position,
4603 convert (ssizetype,
4604 TYPE_SIZE_UNIT (elttype)));
4606 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4607 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4608 xtarget = change_address (target, mode, addr);
4609 if (TREE_CODE (value) == CONSTRUCTOR)
4610 store_constructor (value, xtarget, align, cleared,
4611 bitsize / BITS_PER_UNIT);
4612 else
4613 store_expr (value, xtarget, 0);
4615 expand_exit_loop_if_false (loop,
4616 build (LT_EXPR, integer_type_node,
4617 index, hi_index));
4619 expand_increment (build (PREINCREMENT_EXPR,
4620 TREE_TYPE (index),
4621 index, integer_one_node), 0, 0);
4622 expand_end_loop ();
4623 emit_label (loop_end);
4626 else if ((index != 0 && ! host_integerp (index, 0))
4627 || ! host_integerp (TYPE_SIZE (elttype), 1))
4629 rtx pos_rtx, addr;
4630 tree position;
4632 if (index == 0)
4633                index = ssize_int (i);
4635 if (minelt)
4636 index = convert (ssizetype,
4637 fold (build (MINUS_EXPR, index,
4638 TYPE_MIN_VALUE (domain))));
4640 position = size_binop (MULT_EXPR, index,
4641 convert (ssizetype,
4642 TYPE_SIZE_UNIT (elttype)));
4643 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4644 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4645 xtarget = change_address (target, mode, addr);
4646 store_expr (value, xtarget, 0);
4648 else
4650 if (index != 0)
4651 bitpos = ((tree_low_cst (index, 0) - minelt)
4652 * tree_low_cst (TYPE_SIZE (elttype), 1));
4653 else
4654 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4656 store_constructor_field (target, bitsize, bitpos, mode, value,
4657 type, align, cleared,
4658 TYPE_NONALIASED_COMPONENT (type)
4659 ? MEM_ALIAS_SET (target) :
4660 get_alias_set (elttype));
4666 /* Set constructor assignments. */
4667 else if (TREE_CODE (type) == SET_TYPE)
4669 tree elt = CONSTRUCTOR_ELTS (exp);
4670 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4671 tree domain = TYPE_DOMAIN (type);
4672 tree domain_min, domain_max, bitlength;
4674 /* The default implementation strategy is to extract the constant
4675 parts of the constructor, use that to initialize the target,
4676 and then "or" in whatever non-constant ranges we need in addition.
4678 If a large set is all zero or all ones, it is
4679 probably better to set it using memset (if available) or bzero.
4680 Also, if a large set has just a single range, it may also be
4681         better to first clear the whole set (using
4682         bzero/memset), and then set the bits we want.  */
4684 /* Check for all zeros. */
4685 if (elt == NULL_TREE && size > 0)
4687 if (!cleared)
4688 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4689 return;
4692 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4693 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4694 bitlength = size_binop (PLUS_EXPR,
4695 size_diffop (domain_max, domain_min),
4696 ssize_int (1));
4698 nbits = tree_low_cst (bitlength, 1);
4700 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4701 are "complicated" (more than one range), initialize (the
4702 constant parts) by copying from a constant. */
4703 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4704 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4706 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4707 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4708 char *bit_buffer = (char *) alloca (nbits);
4709 HOST_WIDE_INT word = 0;
4710 unsigned int bit_pos = 0;
4711 unsigned int ibit = 0;
4712 unsigned int offset = 0; /* In bytes from beginning of set. */
4714 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4715 for (;;)
4717 if (bit_buffer[ibit])
4719 if (BYTES_BIG_ENDIAN)
4720 word |= (1 << (set_word_size - 1 - bit_pos));
4721 else
4722 word |= 1 << bit_pos;
4725 bit_pos++; ibit++;
4726 if (bit_pos >= set_word_size || ibit == nbits)
4728 if (word != 0 || ! cleared)
4730 rtx datum = GEN_INT (word);
4731 rtx to_rtx;
4733 /* The assumption here is that it is safe to use
4734 XEXP if the set is multi-word, but not if
4735 it's single-word. */
4736 if (GET_CODE (target) == MEM)
4738 to_rtx = plus_constant (XEXP (target, 0), offset);
4739 to_rtx = change_address (target, mode, to_rtx);
4741 else if (offset == 0)
4742 to_rtx = target;
4743 else
4744 abort ();
4745 emit_move_insn (to_rtx, datum);
4748 if (ibit == nbits)
4749 break;
4750 word = 0;
4751 bit_pos = 0;
4752 offset += set_word_size / BITS_PER_UNIT;
4756 else if (!cleared)
4757 /* Don't bother clearing storage if the set is all ones. */
4758 if (TREE_CHAIN (elt) != NULL_TREE
4759 || (TREE_PURPOSE (elt) == NULL_TREE
4760 ? nbits != 1
4761 : ( ! host_integerp (TREE_VALUE (elt), 0)
4762 || ! host_integerp (TREE_PURPOSE (elt), 0)
4763 || (tree_low_cst (TREE_VALUE (elt), 0)
4764 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4765 != (HOST_WIDE_INT) nbits))))
4766 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4768 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4770 /* Start of range of element or NULL. */
4771 tree startbit = TREE_PURPOSE (elt);
4772 /* End of range of element, or element value. */
4773 tree endbit = TREE_VALUE (elt);
4774 #ifdef TARGET_MEM_FUNCTIONS
4775 HOST_WIDE_INT startb, endb;
4776 #endif
4777 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4779 bitlength_rtx = expand_expr (bitlength,
4780 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4782 /* Handle non-range tuple element like [ expr ]. */
4783 if (startbit == NULL_TREE)
4785 startbit = save_expr (endbit);
4786 endbit = startbit;
4789 startbit = convert (sizetype, startbit);
4790 endbit = convert (sizetype, endbit);
4791 if (! integer_zerop (domain_min))
4793 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4794 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4796 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4797 EXPAND_CONST_ADDRESS);
4798 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4799 EXPAND_CONST_ADDRESS);
4801 if (REG_P (target))
4803 targetx = assign_stack_temp (GET_MODE (target),
4804 GET_MODE_SIZE (GET_MODE (target)),
4806 emit_move_insn (targetx, target);
4809 else if (GET_CODE (target) == MEM)
4810 targetx = target;
4811 else
4812 abort ();
4814 #ifdef TARGET_MEM_FUNCTIONS
4815 /* Optimization: If startbit and endbit are
4816 constants divisible by BITS_PER_UNIT,
4817 call memset instead. */
4818 if (TREE_CODE (startbit) == INTEGER_CST
4819 && TREE_CODE (endbit) == INTEGER_CST
4820 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4821 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4823 emit_library_call (memset_libfunc, LCT_NORMAL,
4824 VOIDmode, 3,
4825 plus_constant (XEXP (targetx, 0),
4826 startb / BITS_PER_UNIT),
4827 Pmode,
4828 constm1_rtx, TYPE_MODE (integer_type_node),
4829 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4830 TYPE_MODE (sizetype));
4832 else
4833 #endif
4834 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4835 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4836 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4837 startbit_rtx, TYPE_MODE (sizetype),
4838 endbit_rtx, TYPE_MODE (sizetype));
4840 if (REG_P (target))
4841 emit_move_insn (target, targetx);
4845 else
4846 abort ();
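/* Illustrative example (not from the original sources): an array
   initializer that supplies fewer elements than the array holds is
   typically expanded as a single clear_storage of the whole object
   followed by stores of only the nonzero elements, e.g.  */
#if 0
void g (void) { int v[16] = { 1, 2 }; }	/* clear all of V, then store
					   elements 0 and 1.  */
#endif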
4849 /* Store the value of EXP (an expression tree)
4850 into a subfield of TARGET which has mode MODE and occupies
4851 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4852 If MODE is VOIDmode, it means that we are storing into a bit-field.
4854 If VALUE_MODE is VOIDmode, return nothing in particular.
4855 UNSIGNEDP is not used in this case.
4857 Otherwise, return an rtx for the value stored. This rtx
4858 has mode VALUE_MODE if that is convenient to do.
4859 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4861 ALIGN is the alignment that TARGET is known to have.
4862 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4864 ALIAS_SET is the alias set for the destination. This value will
4865 (in general) be different from that for TARGET, since TARGET is a
4866 reference to the containing structure. */
4868 static rtx
4869 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4870 unsignedp, align, total_size, alias_set)
4871 rtx target;
4872 HOST_WIDE_INT bitsize;
4873 HOST_WIDE_INT bitpos;
4874 enum machine_mode mode;
4875 tree exp;
4876 enum machine_mode value_mode;
4877 int unsignedp;
4878 unsigned int align;
4879 HOST_WIDE_INT total_size;
4880 int alias_set;
4882 HOST_WIDE_INT width_mask = 0;
4884 if (TREE_CODE (exp) == ERROR_MARK)
4885 return const0_rtx;
4887 if (bitsize < HOST_BITS_PER_WIDE_INT)
4888 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4890 /* If we are storing into an unaligned field of an aligned union that is
4891 in a register, we may have the mode of TARGET being an integer mode but
4892 MODE == BLKmode. In that case, get an aligned object whose size and
4893 alignment are the same as TARGET and store TARGET into it (we can avoid
4894 the store if the field being stored is the entire width of TARGET). Then
4895 call ourselves recursively to store the field into a BLKmode version of
4896 that object. Finally, load from the object into TARGET. This is not
4897 very efficient in general, but should only be slightly more expensive
4898 than the otherwise-required unaligned accesses. Perhaps this can be
4899 cleaned up later. */
4901 if (mode == BLKmode
4902 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4904 rtx object = assign_stack_temp (GET_MODE (target),
4905 GET_MODE_SIZE (GET_MODE (target)), 0);
4906 rtx blk_object = copy_rtx (object);
4908 MEM_SET_IN_STRUCT_P (object, 1);
4909 MEM_SET_IN_STRUCT_P (blk_object, 1);
4910 PUT_MODE (blk_object, BLKmode);
4912 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4913 emit_move_insn (object, target);
4915 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4916 align, total_size, alias_set);
4918 /* Even though we aren't returning target, we need to
4919 give it the updated value. */
4920 emit_move_insn (target, object);
4922 return blk_object;
4925 if (GET_CODE (target) == CONCAT)
4927 /* We're storing into a struct containing a single __complex. */
4929 if (bitpos != 0)
4930 abort ();
4931 return store_expr (exp, target, 0);
4934 /* If the structure is in a register or if the component
4935 is a bit field, we cannot use addressing to access it.
4936 Use bit-field techniques or SUBREG to store in it. */
4938 if (mode == VOIDmode
4939 || (mode != BLKmode && ! direct_store[(int) mode]
4940 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4941 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4942 || GET_CODE (target) == REG
4943 || GET_CODE (target) == SUBREG
4944 /* If the field isn't aligned enough to store as an ordinary memref,
4945 store it as a bit field. */
4946 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4947 && (align < GET_MODE_ALIGNMENT (mode)
4948 || bitpos % GET_MODE_ALIGNMENT (mode)))
4949 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4950 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4951 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4952 /* If the RHS and field are a constant size and the size of the
4953 RHS isn't the same size as the bitfield, we must use bitfield
4954 operations. */
4955 || (bitsize >= 0
4956 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4957 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4959 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4961 /* If BITSIZE is narrower than the size of the type of EXP
4962 we will be narrowing TEMP. Normally, what's wanted are the
4963 low-order bits. However, if EXP's type is a record and this is
4964         a big-endian machine, we want the upper BITSIZE bits.  */
4965 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4966 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4967 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4968 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4969 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4970 - bitsize),
4971 temp, 1);
4973 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4974 MODE. */
4975 if (mode != VOIDmode && mode != BLKmode
4976 && mode != TYPE_MODE (TREE_TYPE (exp)))
4977 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4979 /* If the modes of TARGET and TEMP are both BLKmode, both
4980 must be in memory and BITPOS must be aligned on a byte
4981 boundary. If so, we simply do a block copy. */
4982 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4984 unsigned int exp_align = expr_align (exp);
4986 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4987 || bitpos % BITS_PER_UNIT != 0)
4988 abort ();
4990 target = change_address (target, VOIDmode,
4991 plus_constant (XEXP (target, 0),
4992 bitpos / BITS_PER_UNIT));
4994 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4995 align = MIN (exp_align, align);
4997 /* Find an alignment that is consistent with the bit position. */
4998 while ((bitpos % align) != 0)
4999 align >>= 1;
5001 emit_block_move (target, temp,
5002 bitsize == -1 ? expr_size (exp)
5003 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5004 / BITS_PER_UNIT),
5005 align);
5007 return value_mode == VOIDmode ? const0_rtx : target;
5010 /* Store the value in the bitfield. */
5011 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5012 if (value_mode != VOIDmode)
5014 /* The caller wants an rtx for the value. */
5015 /* If possible, avoid refetching from the bitfield itself. */
5016 if (width_mask != 0
5017 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5019 tree count;
5020 enum machine_mode tmode;
5022 if (unsignedp)
5023 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5024 tmode = GET_MODE (temp);
5025 if (tmode == VOIDmode)
5026 tmode = value_mode;
5027 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5028 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5029 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5031 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5032 NULL_RTX, value_mode, 0, align,
5033 total_size);
5035 return const0_rtx;
5037 else
5039 rtx addr = XEXP (target, 0);
5040 rtx to_rtx;
5042 /* If a value is wanted, it must be the lhs;
5043 so make the address stable for multiple use. */
5045 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5046 && ! CONSTANT_ADDRESS_P (addr)
5047 /* A frame-pointer reference is already stable. */
5048 && ! (GET_CODE (addr) == PLUS
5049 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5050 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5051 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5052 addr = copy_to_reg (addr);
5054 /* Now build a reference to just the desired component. */
5056 to_rtx = copy_rtx (change_address (target, mode,
5057 plus_constant (addr,
5058 (bitpos
5059 / BITS_PER_UNIT))));
5060 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5061 MEM_ALIAS_SET (to_rtx) = alias_set;
5063 return store_expr (exp, to_rtx, value_mode != VOIDmode);
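/* Illustrative sketch (not from the original sources): when the caller
   wants the just-stored bit-field value back without re-reading it, the
   code above masks an unsigned field with WIDTH_MASK and, for a signed
   field, sign-extends with a left shift followed by a right shift in
   the value's mode, essentially:  */
#if 0
static long
sign_extend_field (long temp, int bitsize, int mode_bits)
{
  int count = mode_bits - bitsize;	/* GET_MODE_BITSIZE (tmode) - bitsize */
  return (temp << count) >> count;	/* assumes an arithmetic right shift */
}
#endif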
5067 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5068 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5069 ARRAY_REFs and find the ultimate containing object, which we return.
5071 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5072 bit position, and *PUNSIGNEDP to the signedness of the field.
5073 If the position of the field is variable, we store a tree
5074 giving the variable offset (in units) in *POFFSET.
5075 This offset is in addition to the bit position.
5076 If the position is not variable, we store 0 in *POFFSET.
5077 We set *PALIGNMENT to the alignment of the address that will be
5078 computed. This is the alignment of the thing we return if *POFFSET
5079    is zero, but can be less strictly aligned if *POFFSET is nonzero.
5081 If any of the extraction expressions is volatile,
5082 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5084 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5085 is a mode that can be used to access the field. In that case, *PBITSIZE
5086 is redundant.
5088 If the field describes a variable-sized object, *PMODE is set to
5089 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5090 this case, but the address of the object can be found. */
5092 tree
5093 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5094 punsignedp, pvolatilep, palignment)
5095 tree exp;
5096 HOST_WIDE_INT *pbitsize;
5097 HOST_WIDE_INT *pbitpos;
5098 tree *poffset;
5099 enum machine_mode *pmode;
5100 int *punsignedp;
5101 int *pvolatilep;
5102 unsigned int *palignment;
5104 tree size_tree = 0;
5105 enum machine_mode mode = VOIDmode;
5106 tree offset = size_zero_node;
5107 tree bit_offset = bitsize_zero_node;
5108 unsigned int alignment = BIGGEST_ALIGNMENT;
5109 tree tem;
5111 /* First get the mode, signedness, and size. We do this from just the
5112 outermost expression. */
5113 if (TREE_CODE (exp) == COMPONENT_REF)
5115 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5116 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5117 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5119 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5121 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5123 size_tree = TREE_OPERAND (exp, 1);
5124 *punsignedp = TREE_UNSIGNED (exp);
5126 else
5128 mode = TYPE_MODE (TREE_TYPE (exp));
5129 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5131 if (mode == BLKmode)
5132 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5133 else
5134 *pbitsize = GET_MODE_BITSIZE (mode);
5137 if (size_tree != 0)
5139 if (! host_integerp (size_tree, 1))
5140 mode = BLKmode, *pbitsize = -1;
5141 else
5142 *pbitsize = tree_low_cst (size_tree, 1);
5145 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5146 and find the ultimate containing object. */
5147 while (1)
5149 if (TREE_CODE (exp) == BIT_FIELD_REF)
5150 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5151 else if (TREE_CODE (exp) == COMPONENT_REF)
5153 tree field = TREE_OPERAND (exp, 1);
5154 tree this_offset = DECL_FIELD_OFFSET (field);
5156 /* If this field hasn't been filled in yet, don't go
5157 past it. This should only happen when folding expressions
5158 made during type construction. */
5159 if (this_offset == 0)
5160 break;
5161 else if (! TREE_CONSTANT (this_offset)
5162 && contains_placeholder_p (this_offset))
5163 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5165 offset = size_binop (PLUS_EXPR, offset, this_offset);
5166 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5167 DECL_FIELD_BIT_OFFSET (field));
5169 if (! host_integerp (offset, 0))
5170 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5173 else if (TREE_CODE (exp) == ARRAY_REF)
5175 tree index = TREE_OPERAND (exp, 1);
5176 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5177 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5178 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5180 /* We assume all arrays have sizes that are a multiple of a byte.
5181 First subtract the lower bound, if any, in the type of the
5182 index, then convert to sizetype and multiply by the size of the
5183 array element. */
5184 if (low_bound != 0 && ! integer_zerop (low_bound))
5185 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5186 index, low_bound));
5188 /* If the index has a self-referential type, pass it to a
5189         WITH_RECORD_EXPR; if the component size does, pass our
5190 component to one. */
5191 if (! TREE_CONSTANT (index)
5192 && contains_placeholder_p (index))
5193 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5194 if (! TREE_CONSTANT (unit_size)
5195 && contains_placeholder_p (unit_size))
5196 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5197 TREE_OPERAND (exp, 0));
5199 offset = size_binop (PLUS_EXPR, offset,
5200 size_binop (MULT_EXPR,
5201 convert (sizetype, index),
5202 unit_size));
5205 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5206 && ! ((TREE_CODE (exp) == NOP_EXPR
5207 || TREE_CODE (exp) == CONVERT_EXPR)
5208 && (TYPE_MODE (TREE_TYPE (exp))
5209 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5210 break;
5212 /* If any reference in the chain is volatile, the effect is volatile. */
5213 if (TREE_THIS_VOLATILE (exp))
5214 *pvolatilep = 1;
5216 /* If the offset is non-constant already, then we can't assume any
5217 alignment more than the alignment here. */
5218 if (! TREE_CONSTANT (offset))
5219 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5221 exp = TREE_OPERAND (exp, 0);
5224 if (DECL_P (exp))
5225 alignment = MIN (alignment, DECL_ALIGN (exp));
5226 else if (TREE_TYPE (exp) != 0)
5227 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5229 /* If OFFSET is constant, see if we can return the whole thing as a
5230 constant bit position. Otherwise, split it up. */
5231 if (host_integerp (offset, 0)
5232 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5233 bitsize_unit_node))
5234 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5235 && host_integerp (tem, 0))
5236 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5237 else
5238 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5240 *pmode = mode;
5241 *palignment = alignment;
5242 return exp;
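/* Illustrative example (not part of the original sources): for a
   reference like S.A[2] below, this function would return the VAR_DECL
   for S, with *PBITPOS holding the constant bit offset of the accessed
   element and *POFFSET zero; were the index not constant, the variable
   part of the displacement would come back in *POFFSET instead.  */
#if 0
struct S { int pad; short a[4]; } s;
/* s.a[2]:  bitpos = 8 * sizeof (int) + 2 * 16, offset = 0  */
#endif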
5245 /* Subroutine of expand_expr: compute memory_usage from modifier.  */
5247 static enum memory_use_mode
5248 get_memory_usage_from_modifier (modifier)
5249 enum expand_modifier modifier;
5251 switch (modifier)
5253 case EXPAND_NORMAL:
5254 case EXPAND_SUM:
5255 return MEMORY_USE_RO;
5256 break;
5257 case EXPAND_MEMORY_USE_WO:
5258 return MEMORY_USE_WO;
5259 break;
5260 case EXPAND_MEMORY_USE_RW:
5261 return MEMORY_USE_RW;
5262 break;
5263 case EXPAND_MEMORY_USE_DONT:
5264 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5265 MEMORY_USE_DONT, because they are modifiers to a call of
5266 expand_expr in the ADDR_EXPR case of expand_expr. */
5267 case EXPAND_CONST_ADDRESS:
5268 case EXPAND_INITIALIZER:
5269 return MEMORY_USE_DONT;
5270 case EXPAND_MEMORY_USE_BAD:
5271 default:
5272 abort ();
5276 /* Given an rtx VALUE that may contain additions and multiplications,
5277 return an equivalent value that just refers to a register or memory.
5278 This is done by generating instructions to perform the arithmetic
5279 and returning a pseudo-register containing the value.
5281 The returned value may be a REG, SUBREG, MEM or constant. */
5284 force_operand (value, target)
5285 rtx value, target;
5287 register optab binoptab = 0;
5288 /* Use a temporary to force order of execution of calls to
5289 `force_operand'. */
5290 rtx tmp;
5291 register rtx op2;
5292 /* Use subtarget as the target for operand 0 of a binary operation. */
5293 register rtx subtarget = get_subtarget (target);
5295 /* Check for a PIC address load. */
5296 if (flag_pic
5297 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5298 && XEXP (value, 0) == pic_offset_table_rtx
5299 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5300 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5301 || GET_CODE (XEXP (value, 1)) == CONST))
5303 if (!subtarget)
5304 subtarget = gen_reg_rtx (GET_MODE (value));
5305 emit_move_insn (subtarget, value);
5306 return subtarget;
5309 if (GET_CODE (value) == PLUS)
5310 binoptab = add_optab;
5311 else if (GET_CODE (value) == MINUS)
5312 binoptab = sub_optab;
5313 else if (GET_CODE (value) == MULT)
5315 op2 = XEXP (value, 1);
5316 if (!CONSTANT_P (op2)
5317 && !(GET_CODE (op2) == REG && op2 != subtarget))
5318 subtarget = 0;
5319 tmp = force_operand (XEXP (value, 0), subtarget);
5320 return expand_mult (GET_MODE (value), tmp,
5321 force_operand (op2, NULL_RTX),
5322 target, 0);
5325 if (binoptab)
5327 op2 = XEXP (value, 1);
5328 if (!CONSTANT_P (op2)
5329 && !(GET_CODE (op2) == REG && op2 != subtarget))
5330 subtarget = 0;
5331 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5333 binoptab = add_optab;
5334 op2 = negate_rtx (GET_MODE (value), op2);
5337 /* Check for an addition with OP2 a constant integer and our first
5338 operand a PLUS of a virtual register and something else. In that
5339 case, we want to emit the sum of the virtual register and the
5340 constant first and then add the other value. This allows virtual
5341 register instantiation to simply modify the constant rather than
5342 creating another one around this addition. */
5343 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5344 && GET_CODE (XEXP (value, 0)) == PLUS
5345 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5346 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5347 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5349 rtx temp = expand_binop (GET_MODE (value), binoptab,
5350 XEXP (XEXP (value, 0), 0), op2,
5351 subtarget, 0, OPTAB_LIB_WIDEN);
5352 return expand_binop (GET_MODE (value), binoptab, temp,
5353 force_operand (XEXP (XEXP (value, 0), 1), 0),
5354 target, 0, OPTAB_LIB_WIDEN);
5357 tmp = force_operand (XEXP (value, 0), subtarget);
5358 return expand_binop (GET_MODE (value), binoptab, tmp,
5359 force_operand (op2, NULL_RTX),
5360 target, 0, OPTAB_LIB_WIDEN);
5361 /* We give UNSIGNEDP = 0 to expand_binop
5362 because the only operations we are expanding here are signed ones. */
5364 return value;
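/* Usage sketch (illustrative, not from the original sources; SOME_REG
   stands for an existing pseudo): given an address-like sum,
   force_operand emits the arithmetic explicitly and returns a register
   or memory rtx holding the result, e.g.  */
#if 0
rtx sum = force_operand (gen_rtx_PLUS (Pmode, some_reg, GEN_INT (8)),
			 NULL_RTX);
#endif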
5367 /* Subroutine of expand_expr:
5368 save the non-copied parts (LIST) of an expr (LHS), and return a list
5369 which can restore these values to their previous values,
5370 should something modify their storage. */
5372 static tree
5373 save_noncopied_parts (lhs, list)
5374 tree lhs;
5375 tree list;
5377 tree tail;
5378 tree parts = 0;
5380 for (tail = list; tail; tail = TREE_CHAIN (tail))
5381 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5382 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5383 else
5385 tree part = TREE_VALUE (tail);
5386 tree part_type = TREE_TYPE (part);
5387 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5388 rtx target = assign_temp (part_type, 0, 1, 1);
5389 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5390 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5391 parts = tree_cons (to_be_saved,
5392 build (RTL_EXPR, part_type, NULL_TREE,
5393 (tree) target),
5394 parts);
5395 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5397 return parts;
5400 /* Subroutine of expand_expr:
5401 record the non-copied parts (LIST) of an expr (LHS), and return a list
5402 which specifies the initial values of these parts. */
5404 static tree
5405 init_noncopied_parts (lhs, list)
5406 tree lhs;
5407 tree list;
5409 tree tail;
5410 tree parts = 0;
5412 for (tail = list; tail; tail = TREE_CHAIN (tail))
5413 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5414 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5415 else if (TREE_PURPOSE (tail))
5417 tree part = TREE_VALUE (tail);
5418 tree part_type = TREE_TYPE (part);
5419 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5420 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5422 return parts;
5425 /* Subroutine of expand_expr: return nonzero iff there is no way that
5426 EXP can reference X, which is being modified. TOP_P is nonzero if this
5427 call is going to be used to determine whether we need a temporary
5428 for EXP, as opposed to a recursive call to this function.
5430 It is always safe for this routine to return zero since it merely
5431 searches for optimization opportunities. */
5434 safe_from_p (x, exp, top_p)
5435 rtx x;
5436 tree exp;
5437 int top_p;
5439 rtx exp_rtl = 0;
5440 int i, nops;
5441 static int save_expr_count;
5442 static int save_expr_size = 0;
5443 static tree *save_expr_rewritten;
5444 static tree save_expr_trees[256];
5446 if (x == 0
5447 /* If EXP has varying size, we MUST use a target since we currently
5448 have no way of allocating temporaries of variable size
5449 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5450 So we assume here that something at a higher level has prevented a
5451 clash. This is somewhat bogus, but the best we can do. Only
5452 do this when X is BLKmode and when we are at the top level. */
5453 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5454 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5455 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5456 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5457 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5458 != INTEGER_CST)
5459 && GET_MODE (x) == BLKmode))
5460 return 1;
5462 if (top_p && save_expr_size == 0)
5464 int rtn;
5466 save_expr_count = 0;
5467 save_expr_size = ARRAY_SIZE (save_expr_trees);
5468 save_expr_rewritten = &save_expr_trees[0];
5470 rtn = safe_from_p (x, exp, 1);
5472 for (i = 0; i < save_expr_count; ++i)
5474 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5475 abort ();
5476 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5479 save_expr_size = 0;
5481 return rtn;
5484 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5485 find the underlying pseudo. */
5486 if (GET_CODE (x) == SUBREG)
5488 x = SUBREG_REG (x);
5489 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5490 return 0;
5493 /* If X is a location in the outgoing argument area, it is always safe. */
5494 if (GET_CODE (x) == MEM
5495 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5496 || (GET_CODE (XEXP (x, 0)) == PLUS
5497 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5498 return 1;
5500 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5502 case 'd':
5503 exp_rtl = DECL_RTL (exp);
5504 break;
5506 case 'c':
5507 return 1;
5509 case 'x':
5510 if (TREE_CODE (exp) == TREE_LIST)
5511 return ((TREE_VALUE (exp) == 0
5512 || safe_from_p (x, TREE_VALUE (exp), 0))
5513 && (TREE_CHAIN (exp) == 0
5514 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5515 else if (TREE_CODE (exp) == ERROR_MARK)
5516 return 1; /* An already-visited SAVE_EXPR? */
5517 else
5518 return 0;
5520 case '1':
5521 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5523 case '2':
5524 case '<':
5525 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5526 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5528 case 'e':
5529 case 'r':
5530 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5531 the expression. If it is set, we conflict iff we are that rtx or
5532 both are in memory. Otherwise, we check all operands of the
5533 expression recursively. */
5535 switch (TREE_CODE (exp))
5537 case ADDR_EXPR:
5538 return (staticp (TREE_OPERAND (exp, 0))
5539 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5540 || TREE_STATIC (exp));
5542 case INDIRECT_REF:
5543 if (GET_CODE (x) == MEM)
5544 return 0;
5545 break;
5547 case CALL_EXPR:
5548 exp_rtl = CALL_EXPR_RTL (exp);
5549 if (exp_rtl == 0)
5551 /* Assume that the call will clobber all hard registers and
5552 all of memory. */
5553 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5554 || GET_CODE (x) == MEM)
5555 return 0;
5558 break;
5560 case RTL_EXPR:
5561 /* If a sequence exists, we would have to scan every instruction
5562 in the sequence to see if it was safe. This is probably not
5563 worthwhile. */
5564 if (RTL_EXPR_SEQUENCE (exp))
5565 return 0;
5567 exp_rtl = RTL_EXPR_RTL (exp);
5568 break;
5570 case WITH_CLEANUP_EXPR:
5571 exp_rtl = RTL_EXPR_RTL (exp);
5572 break;
5574 case CLEANUP_POINT_EXPR:
5575 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5577 case SAVE_EXPR:
5578 exp_rtl = SAVE_EXPR_RTL (exp);
5579 if (exp_rtl)
5580 break;
5582 /* This SAVE_EXPR might appear many times in the top-level
5583 safe_from_p() expression, and if it has a complex
5584 subexpression, examining it multiple times could result
5585 in a combinatorial explosion. E.g. on an Alpha
5586 running at least 200MHz, a Fortran test case compiled with
5587 optimization took about 28 minutes to compile -- even though
5588 it was only a few lines long, and the complicated line causing
5589 so much time to be spent in the earlier version of safe_from_p()
5590 had only 293 or so unique nodes.
5592 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5593 where it is so we can turn it back into a SAVE_EXPR in the top-level safe_from_p()
5594 when we're done. */
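/* Illustrative note, not part of the original source: the explosion comes
from tree sharing. If a SAVE_EXPR is referenced twice by its parent, and
its operand again contains such doubly-referenced SAVE_EXPRs N levels
deep, a naive walk visits the innermost expression about 2**N times even
though the DAG has only O(N) distinct nodes; marking each SAVE_EXPR as
ERROR_MARK on first visit keeps the walk linear. */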
5596 /* For now, don't bother re-sizing the array. */
5597 if (save_expr_count >= save_expr_size)
5598 return 0;
5599 save_expr_rewritten[save_expr_count++] = exp;
5601 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5602 for (i = 0; i < nops; i++)
5604 tree operand = TREE_OPERAND (exp, i);
5605 if (operand == NULL_TREE)
5606 continue;
5607 TREE_SET_CODE (exp, ERROR_MARK);
5608 if (!safe_from_p (x, operand, 0))
5609 return 0;
5610 TREE_SET_CODE (exp, SAVE_EXPR);
5612 TREE_SET_CODE (exp, ERROR_MARK);
5613 return 1;
5615 case BIND_EXPR:
5616 /* The only operand we look at is operand 1. The rest aren't
5617 part of the expression. */
5618 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5620 case METHOD_CALL_EXPR:
5621 /* This takes an rtx argument, but shouldn't appear here. */
5622 abort ();
5624 default:
5625 break;
5628 /* If we have an rtx, we do not need to scan our operands. */
5629 if (exp_rtl)
5630 break;
5632 nops = first_rtl_op (TREE_CODE (exp));
5633 for (i = 0; i < nops; i++)
5634 if (TREE_OPERAND (exp, i) != 0
5635 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5636 return 0;
5638 /* If this is a language-specific tree code, it may require
5639 special handling. */
5640 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5641 && lang_safe_from_p
5642 && !(*lang_safe_from_p) (x, exp))
5643 return 0;
5646 /* If we have an rtl, find any enclosed object. Then see if we conflict
5647 with it. */
5648 if (exp_rtl)
5650 if (GET_CODE (exp_rtl) == SUBREG)
5652 exp_rtl = SUBREG_REG (exp_rtl);
5653 if (GET_CODE (exp_rtl) == REG
5654 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5655 return 0;
5658 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5659 are memory and EXP is not readonly. */
5660 return ! (rtx_equal_p (x, exp_rtl)
5661 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5662 && ! TREE_READONLY (exp)));
5665 /* If we reach here, it is safe. */
5666 return 1;
5669 /* Subroutine of expand_expr: return nonzero iff EXP is an
5670 expression whose type is statically determinable. */
5672 static int
5673 fixed_type_p (exp)
5674 tree exp;
5676 if (TREE_CODE (exp) == PARM_DECL
5677 || TREE_CODE (exp) == VAR_DECL
5678 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5679 || TREE_CODE (exp) == COMPONENT_REF
5680 || TREE_CODE (exp) == ARRAY_REF)
5681 return 1;
5682 return 0;
5685 /* Subroutine of expand_expr: return rtx if EXP is a
5686 variable or parameter; else return 0. */
5688 static rtx
5689 var_rtx (exp)
5690 tree exp;
5692 STRIP_NOPS (exp);
5693 switch (TREE_CODE (exp))
5695 case PARM_DECL:
5696 case VAR_DECL:
5697 return DECL_RTL (exp);
5698 default:
5699 return 0;
5703 #ifdef MAX_INTEGER_COMPUTATION_MODE
5704 void
5705 check_max_integer_computation_mode (exp)
5706 tree exp;
5708 enum tree_code code;
5709 enum machine_mode mode;
5711 /* Strip any NOPs that don't change the mode. */
5712 STRIP_NOPS (exp);
5713 code = TREE_CODE (exp);
5715 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5716 if (code == NOP_EXPR
5717 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5718 return;
5720 /* First check the type of the overall operation. We need only look at
5721 unary, binary and relational operations. */
5722 if (TREE_CODE_CLASS (code) == '1'
5723 || TREE_CODE_CLASS (code) == '2'
5724 || TREE_CODE_CLASS (code) == '<')
5726 mode = TYPE_MODE (TREE_TYPE (exp));
5727 if (GET_MODE_CLASS (mode) == MODE_INT
5728 && mode > MAX_INTEGER_COMPUTATION_MODE)
5729 fatal ("unsupported wide integer operation");
5732 /* Check operand of a unary op. */
5733 if (TREE_CODE_CLASS (code) == '1')
5735 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5736 if (GET_MODE_CLASS (mode) == MODE_INT
5737 && mode > MAX_INTEGER_COMPUTATION_MODE)
5738 fatal ("unsupported wide integer operation");
5741 /* Check operands of a binary/comparison op. */
5742 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5744 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5745 if (GET_MODE_CLASS (mode) == MODE_INT
5746 && mode > MAX_INTEGER_COMPUTATION_MODE)
5747 fatal ("unsupported wide integer operation");
5749 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5750 if (GET_MODE_CLASS (mode) == MODE_INT
5751 && mode > MAX_INTEGER_COMPUTATION_MODE)
5752 fatal ("unsupported wide integer operation");
5755 #endif
5757 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5758 has any readonly fields. If any of the fields have types that
5759 contain readonly fields, return true as well. */
5761 static int
5762 readonly_fields_p (type)
5763 tree type;
5765 tree field;
5767 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5768 if (TREE_CODE (field) == FIELD_DECL
5769 && (TREE_READONLY (field)
5770 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5771 && readonly_fields_p (TREE_TYPE (field)))))
5772 return 1;
5774 return 0;
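/* Worked example, added for illustration with hypothetical types:

struct inner { const int key; int val; };
struct outer { struct inner i; int n; };

readonly_fields_p returns 1 for `struct inner' (KEY is a readonly field)
and also for `struct outer' (its field I has a type that contains a
readonly field). */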
5777 /* expand_expr: generate code for computing expression EXP.
5778 An rtx for the computed value is returned. The value is never null.
5779 In the case of a void EXP, const0_rtx is returned.
5781 The value may be stored in TARGET if TARGET is nonzero.
5782 TARGET is just a suggestion; callers must assume that
5783 the rtx returned may not be the same as TARGET.
5785 If TARGET is CONST0_RTX, it means that the value will be ignored.
5787 If TMODE is not VOIDmode, it suggests generating the
5788 result in mode TMODE. But this is done only when convenient.
5789 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5790 TMODE is just a suggestion; callers must assume that
5791 the rtx returned may not have mode TMODE.
5793 Note that TARGET may have neither TMODE nor MODE. In that case, it
5794 probably will not be used.
5796 If MODIFIER is EXPAND_SUM then when EXP is an addition
5797 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5798 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5799 products as above, or REG or MEM, or constant.
5800 Ordinarily in such cases we would output mul or add instructions
5801 and then return a pseudo reg containing the sum.
5803 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5804 it also marks a label as absolutely required (it can't be dead).
5805 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5806 This is used for outputting expressions used in initializers.
5808 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5809 with a constant address even if that address is not normally legitimate.
5810 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
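/* Illustrative sketch, not from the original comment: for a C expression
such as `&arr[i*4 + 3]' (ARR a char array), an EXPAND_SUM expansion may
legitimately return something like

(plus:SI (mult:SI (reg:SI i) (const_int 4))
(plus:SI (symbol_ref:SI "arr") (const_int 3)))

leaving the caller (typically address generation) to decide how to
materialize the sum instead of emitting mul/add insns at once. The exact
shape and modes here are only an example. */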
5812 rtx
5813 expand_expr (exp, target, tmode, modifier)
5814 register tree exp;
5815 rtx target;
5816 enum machine_mode tmode;
5817 enum expand_modifier modifier;
5819 register rtx op0, op1, temp;
5820 tree type = TREE_TYPE (exp);
5821 int unsignedp = TREE_UNSIGNED (type);
5822 register enum machine_mode mode;
5823 register enum tree_code code = TREE_CODE (exp);
5824 optab this_optab;
5825 rtx subtarget, original_target;
5826 int ignore;
5827 tree context;
5828 /* Used by check-memory-usage to make modifier read only. */
5829 enum expand_modifier ro_modifier;
5831 /* Handle ERROR_MARK before anybody tries to access its type. */
5832 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5834 op0 = CONST0_RTX (tmode);
5835 if (op0 != 0)
5836 return op0;
5837 return const0_rtx;
5840 mode = TYPE_MODE (type);
5841 /* Use subtarget as the target for operand 0 of a binary operation. */
5842 subtarget = get_subtarget (target);
5843 original_target = target;
5844 ignore = (target == const0_rtx
5845 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5846 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5847 || code == COND_EXPR)
5848 && TREE_CODE (type) == VOID_TYPE));
5850 /* Make a read-only version of the modifier. */
5851 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5852 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5853 ro_modifier = modifier;
5854 else
5855 ro_modifier = EXPAND_NORMAL;
5857 /* If we are going to ignore this result, we need only do something
5858 if there is a side-effect somewhere in the expression. If there
5859 is, short-circuit the most common cases here. Note that we must
5860 not call expand_expr with anything but const0_rtx in case this
5861 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
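/* For example (illustration only), the expression statement `a + f ();'
has its value ignored but does have side effects; both operands are
expanded with const0_rtx as the target, purely for their side effects,
and const0_rtx is returned. */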
5863 if (ignore)
5865 if (! TREE_SIDE_EFFECTS (exp))
5866 return const0_rtx;
5868 /* Ensure we reference a volatile object even if value is ignored, but
5869 don't do this if all we are doing is taking its address. */
5870 if (TREE_THIS_VOLATILE (exp)
5871 && TREE_CODE (exp) != FUNCTION_DECL
5872 && mode != VOIDmode && mode != BLKmode
5873 && modifier != EXPAND_CONST_ADDRESS)
5875 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5876 if (GET_CODE (temp) == MEM)
5877 temp = copy_to_reg (temp);
5878 return const0_rtx;
5881 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5882 || code == INDIRECT_REF || code == BUFFER_REF)
5883 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5884 VOIDmode, ro_modifier);
5885 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5886 || code == ARRAY_REF)
5888 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5889 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5890 return const0_rtx;
5892 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5893 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5894 /* If the second operand has no side effects, just evaluate
5895 the first. */
5896 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5897 VOIDmode, ro_modifier);
5898 else if (code == BIT_FIELD_REF)
5900 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5901 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5902 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5903 return const0_rtx;
5906 target = 0;
5909 #ifdef MAX_INTEGER_COMPUTATION_MODE
5910 /* Only check stuff here if the mode we want is different from the mode
5911 of the expression; if it's the same, check_max_integer_computation_mode
5912 will handle it. Do we really need to check this stuff at all? */
5914 if (target
5915 && GET_MODE (target) != mode
5916 && TREE_CODE (exp) != INTEGER_CST
5917 && TREE_CODE (exp) != PARM_DECL
5918 && TREE_CODE (exp) != ARRAY_REF
5919 && TREE_CODE (exp) != COMPONENT_REF
5920 && TREE_CODE (exp) != BIT_FIELD_REF
5921 && TREE_CODE (exp) != INDIRECT_REF
5922 && TREE_CODE (exp) != CALL_EXPR
5923 && TREE_CODE (exp) != VAR_DECL
5924 && TREE_CODE (exp) != RTL_EXPR)
5926 enum machine_mode mode = GET_MODE (target);
5928 if (GET_MODE_CLASS (mode) == MODE_INT
5929 && mode > MAX_INTEGER_COMPUTATION_MODE)
5930 fatal ("unsupported wide integer operation");
5933 if (tmode != mode
5934 && TREE_CODE (exp) != INTEGER_CST
5935 && TREE_CODE (exp) != PARM_DECL
5936 && TREE_CODE (exp) != ARRAY_REF
5937 && TREE_CODE (exp) != COMPONENT_REF
5938 && TREE_CODE (exp) != BIT_FIELD_REF
5939 && TREE_CODE (exp) != INDIRECT_REF
5940 && TREE_CODE (exp) != VAR_DECL
5941 && TREE_CODE (exp) != CALL_EXPR
5942 && TREE_CODE (exp) != RTL_EXPR
5943 && GET_MODE_CLASS (tmode) == MODE_INT
5944 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5945 fatal ("unsupported wide integer operation");
5947 check_max_integer_computation_mode (exp);
5948 #endif
5950 /* If we will do cse, generate all results into pseudo registers
5951 since 1) that allows cse to find more things
5952 and 2) otherwise cse could produce an insn the machine
5953 cannot support. */
5955 if (! cse_not_expected && mode != BLKmode && target
5956 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5957 target = subtarget;
5959 switch (code)
5961 case LABEL_DECL:
5963 tree function = decl_function_context (exp);
5964 /* Handle using a label in a containing function. */
5965 if (function != current_function_decl
5966 && function != inline_function_decl && function != 0)
5968 struct function *p = find_function_data (function);
5969 /* Allocate in the memory associated with the function
5970 that the label is in. */
5971 push_obstacks (p->function_obstack,
5972 p->function_maybepermanent_obstack);
5974 p->expr->x_forced_labels
5975 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5976 p->expr->x_forced_labels);
5977 pop_obstacks ();
5979 else
5981 if (modifier == EXPAND_INITIALIZER)
5982 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5983 label_rtx (exp),
5984 forced_labels);
5987 temp = gen_rtx_MEM (FUNCTION_MODE,
5988 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5989 if (function != current_function_decl
5990 && function != inline_function_decl && function != 0)
5991 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5992 return temp;
5995 case PARM_DECL:
5996 if (DECL_RTL (exp) == 0)
5998 error_with_decl (exp, "prior parameter's size depends on `%s'");
5999 return CONST0_RTX (mode);
6002 /* ... fall through ... */
6004 case VAR_DECL:
6005 /* If a static var's type was incomplete when the decl was written,
6006 but the type is complete now, lay out the decl now. */
6007 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6008 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6010 push_obstacks_nochange ();
6011 end_temporary_allocation ();
6012 layout_decl (exp, 0);
6013 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6014 pop_obstacks ();
6017 /* Although static-storage variables start off initialized, according to
6018 ANSI C, a memcpy could overwrite them with uninitialized values. So
6019 we check them too. This also lets us check for read-only variables
6020 accessed via a non-const declaration, in case it won't be detected
6021 any other way (e.g., in an embedded system or OS kernel without
6022 memory protection).
6024 Aggregates are not checked here; they're handled elsewhere. */
6025 if (cfun && current_function_check_memory_usage
6026 && code == VAR_DECL
6027 && GET_CODE (DECL_RTL (exp)) == MEM
6028 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6030 enum memory_use_mode memory_usage;
6031 memory_usage = get_memory_usage_from_modifier (modifier);
6033 in_check_memory_usage = 1;
6034 if (memory_usage != MEMORY_USE_DONT)
6035 emit_library_call (chkr_check_addr_libfunc,
6036 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6037 XEXP (DECL_RTL (exp), 0), Pmode,
6038 GEN_INT (int_size_in_bytes (type)),
6039 TYPE_MODE (sizetype),
6040 GEN_INT (memory_usage),
6041 TYPE_MODE (integer_type_node));
6042 in_check_memory_usage = 0;
6045 /* ... fall through ... */
6047 case FUNCTION_DECL:
6048 case RESULT_DECL:
6049 if (DECL_RTL (exp) == 0)
6050 abort ();
6052 /* Ensure the variable is marked as used even if it doesn't go through
6053 a parser. If it hasn't been used yet, write out an external
6054 definition. */
6055 if (! TREE_USED (exp))
6057 assemble_external (exp);
6058 TREE_USED (exp) = 1;
6061 /* Show we haven't gotten RTL for this yet. */
6062 temp = 0;
6064 /* Handle variables inherited from containing functions. */
6065 context = decl_function_context (exp);
6067 /* We treat inline_function_decl as an alias for the current function
6068 because that is the inline function whose vars, types, etc.
6069 are being merged into the current function.
6070 See expand_inline_function. */
6072 if (context != 0 && context != current_function_decl
6073 && context != inline_function_decl
6074 /* If var is static, we don't need a static chain to access it. */
6075 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6076 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6078 rtx addr;
6080 /* Mark as non-local and addressable. */
6081 DECL_NONLOCAL (exp) = 1;
6082 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6083 abort ();
6084 mark_addressable (exp);
6085 if (GET_CODE (DECL_RTL (exp)) != MEM)
6086 abort ();
6087 addr = XEXP (DECL_RTL (exp), 0);
6088 if (GET_CODE (addr) == MEM)
6089 addr = change_address (addr, Pmode,
6090 fix_lexical_addr (XEXP (addr, 0), exp));
6091 else
6092 addr = fix_lexical_addr (addr, exp);
6094 temp = change_address (DECL_RTL (exp), mode, addr);
6097 /* This is the case of an array whose size is to be determined
6098 from its initializer, while the initializer is still being parsed.
6099 See expand_decl. */
6101 else if (GET_CODE (DECL_RTL (exp)) == MEM
6102 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6103 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6104 XEXP (DECL_RTL (exp), 0));
6106 /* If DECL_RTL is memory, we are in the normal case; if either the
6107 address is not valid, or it is not a register and -fforce-addr is
6108 specified, get the address into a register. */
6110 else if (GET_CODE (DECL_RTL (exp)) == MEM
6111 && modifier != EXPAND_CONST_ADDRESS
6112 && modifier != EXPAND_SUM
6113 && modifier != EXPAND_INITIALIZER
6114 && (! memory_address_p (DECL_MODE (exp),
6115 XEXP (DECL_RTL (exp), 0))
6116 || (flag_force_addr
6117 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6118 temp = change_address (DECL_RTL (exp), VOIDmode,
6119 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6121 /* If we got something, return it. But first, set the alignment
6122 if the address is a register. */
6123 if (temp != 0)
6125 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6126 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6128 return temp;
6131 /* If the mode of DECL_RTL does not match that of the decl, it
6132 must be a promoted value. We return a SUBREG of the wanted mode,
6133 but mark it so that we know that it was already extended. */
6135 if (GET_CODE (DECL_RTL (exp)) == REG
6136 && GET_MODE (DECL_RTL (exp)) != mode)
6138 /* Get the signedness used for this variable. Ensure we get the
6139 same mode we got when the variable was declared. */
6140 if (GET_MODE (DECL_RTL (exp))
6141 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6142 abort ();
6144 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6145 SUBREG_PROMOTED_VAR_P (temp) = 1;
6146 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6147 return temp;
6150 return DECL_RTL (exp);
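/* Illustrative, target-dependent example, not from the original source:
with PROMOTE_MODE in effect a `short' variable may live in an SImode
pseudo even though its declared mode is HImode. DECL_RTL is then
(reg:SI n) and the code above returns

(subreg:HI (reg:SI n) 0)

with SUBREG_PROMOTED_VAR_P set, so later uses know the value has already
been sign- or zero-extended. */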
6152 case INTEGER_CST:
6153 return immed_double_const (TREE_INT_CST_LOW (exp),
6154 TREE_INT_CST_HIGH (exp), mode);
6156 case CONST_DECL:
6157 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6158 EXPAND_MEMORY_USE_BAD);
6160 case REAL_CST:
6161 /* If optimized, generate immediate CONST_DOUBLE
6162 which will be turned into memory by reload if necessary.
6164 We used to force a register so that loop.c could see it. But
6165 this does not allow gen_* patterns to perform optimizations with
6166 the constants. It also produces two insns in cases like "x = 1.0;".
6167 On most machines, floating-point constants are not permitted in
6168 many insns, so we'd end up copying it to a register in any case.
6170 Now, we do the copying in expand_binop, if appropriate. */
6171 return immed_real_const (exp);
6173 case COMPLEX_CST:
6174 case STRING_CST:
6175 if (! TREE_CST_RTL (exp))
6176 output_constant_def (exp);
6178 /* TREE_CST_RTL probably contains a constant address.
6179 On RISC machines where a constant address isn't valid,
6180 make some insns to get that address into a register. */
6181 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6182 && modifier != EXPAND_CONST_ADDRESS
6183 && modifier != EXPAND_INITIALIZER
6184 && modifier != EXPAND_SUM
6185 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6186 || (flag_force_addr
6187 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6188 return change_address (TREE_CST_RTL (exp), VOIDmode,
6189 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6190 return TREE_CST_RTL (exp);
6192 case EXPR_WITH_FILE_LOCATION:
6194 rtx to_return;
6195 const char *saved_input_filename = input_filename;
6196 int saved_lineno = lineno;
6197 input_filename = EXPR_WFL_FILENAME (exp);
6198 lineno = EXPR_WFL_LINENO (exp);
6199 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6200 emit_line_note (input_filename, lineno);
6201 /* Possibly avoid switching back and forth here. */
6202 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6203 input_filename = saved_input_filename;
6204 lineno = saved_lineno;
6205 return to_return;
6208 case SAVE_EXPR:
6209 context = decl_function_context (exp);
6211 /* If this SAVE_EXPR was at global context, assume we are in an
6212 initialization function and move it into our context. */
6213 if (context == 0)
6214 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6216 /* We treat inline_function_decl as an alias for the current function
6217 because that is the inline function whose vars, types, etc.
6218 are being merged into the current function.
6219 See expand_inline_function. */
6220 if (context == current_function_decl || context == inline_function_decl)
6221 context = 0;
6223 /* If this is non-local, handle it. */
6224 if (context)
6226 /* The following call just exists to abort if the context is
6227 not of a containing function. */
6228 find_function_data (context);
6230 temp = SAVE_EXPR_RTL (exp);
6231 if (temp && GET_CODE (temp) == REG)
6233 put_var_into_stack (exp);
6234 temp = SAVE_EXPR_RTL (exp);
6236 if (temp == 0 || GET_CODE (temp) != MEM)
6237 abort ();
6238 return change_address (temp, mode,
6239 fix_lexical_addr (XEXP (temp, 0), exp));
6241 if (SAVE_EXPR_RTL (exp) == 0)
6243 if (mode == VOIDmode)
6244 temp = const0_rtx;
6245 else
6246 temp = assign_temp (type, 3, 0, 0);
6248 SAVE_EXPR_RTL (exp) = temp;
6249 if (!optimize && GET_CODE (temp) == REG)
6250 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6251 save_expr_regs);
6253 /* If the mode of TEMP does not match that of the expression, it
6254 must be a promoted value. We pass store_expr a SUBREG of the
6255 wanted mode but mark it so that we know that it was already
6256 extended. Note that `unsignedp' was modified above in
6257 this case. */
6259 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6261 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6262 SUBREG_PROMOTED_VAR_P (temp) = 1;
6263 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6266 if (temp == const0_rtx)
6267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6268 EXPAND_MEMORY_USE_BAD);
6269 else
6270 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6272 TREE_USED (exp) = 1;
6275 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6276 must be a promoted value. We return a SUBREG of the wanted mode,
6277 but mark it so that we know that it was already extended. */
6279 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6280 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6282 /* Compute the signedness and make the proper SUBREG. */
6283 promote_mode (type, mode, &unsignedp, 0);
6284 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6285 SUBREG_PROMOTED_VAR_P (temp) = 1;
6286 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6287 return temp;
6290 return SAVE_EXPR_RTL (exp);
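/* Illustrative note, not from the original source: a SAVE_EXPR is how a
front end requests single evaluation of a reused subexpression. For the
GNU C extension `p ? : q', for instance, the condition is typically
wrapped as SAVE_EXPR <p> so that P is evaluated only once; the first
expansion above stores the value into SAVE_EXPR_RTL, and later expansions
of the same node simply return that rtl. */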
6292 case UNSAVE_EXPR:
6294 rtx temp;
6295 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6296 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6297 return temp;
6300 case PLACEHOLDER_EXPR:
6302 tree placeholder_expr;
6304 /* If there is an object on the head of the placeholder list,
6305 see if some object in it is of type TYPE or is a pointer to it.
6306 For further information, see tree.def. */
6307 for (placeholder_expr = placeholder_list;
6308 placeholder_expr != 0;
6309 placeholder_expr = TREE_CHAIN (placeholder_expr))
6311 tree need_type = TYPE_MAIN_VARIANT (type);
6312 tree object = 0;
6313 tree old_list = placeholder_list;
6314 tree elt;
6316 /* Find the outermost reference that is of the type we want.
6317 If none, see if any object has a type that is a pointer to
6318 the type we want. */
6319 for (elt = TREE_PURPOSE (placeholder_expr);
6320 elt != 0 && object == 0;
6321 elt
6322 = ((TREE_CODE (elt) == COMPOUND_EXPR
6323 || TREE_CODE (elt) == COND_EXPR)
6324 ? TREE_OPERAND (elt, 1)
6325 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6326 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6327 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6328 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6329 ? TREE_OPERAND (elt, 0) : 0))
6330 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6331 object = elt;
6333 for (elt = TREE_PURPOSE (placeholder_expr);
6334 elt != 0 && object == 0;
6335 elt
6336 = ((TREE_CODE (elt) == COMPOUND_EXPR
6337 || TREE_CODE (elt) == COND_EXPR)
6338 ? TREE_OPERAND (elt, 1)
6339 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6340 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6341 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6342 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6343 ? TREE_OPERAND (elt, 0) : 0))
6344 if (POINTER_TYPE_P (TREE_TYPE (elt))
6345 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6346 == need_type))
6347 object = build1 (INDIRECT_REF, need_type, elt);
6349 if (object != 0)
6351 /* Expand this object, skipping the list entries before the
6352 one in which it was found, in case it is itself a
6353 PLACEHOLDER_EXPR; in that case we want to translate it
6354 using subsequent entries. */
6355 placeholder_list = TREE_CHAIN (placeholder_expr);
6356 temp = expand_expr (object, original_target, tmode,
6357 ro_modifier);
6358 placeholder_list = old_list;
6359 return temp;
6364 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6365 abort ();
6367 case WITH_RECORD_EXPR:
6368 /* Put the object on the placeholder list, expand our first operand,
6369 and pop the list. */
6370 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6371 placeholder_list);
6372 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6373 tmode, ro_modifier);
6374 placeholder_list = TREE_CHAIN (placeholder_list);
6375 return target;
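/* Illustrative note (mainly an Ada scenario, not from the original
source): for a discriminated record such as

type Buf (Len : Natural) is record
Data : String (1 .. Len);
end record;

the size of DATA refers to the discriminant LEN of whatever object is
being operated on. That reference is a PLACEHOLDER_EXPR; WITH_RECORD_EXPR
supplies the particular object, which the code above pushes on
placeholder_list while operand 0 is expanded. */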
6377 case GOTO_EXPR:
6378 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6379 expand_goto (TREE_OPERAND (exp, 0));
6380 else
6381 expand_computed_goto (TREE_OPERAND (exp, 0));
6382 return const0_rtx;
6384 case EXIT_EXPR:
6385 expand_exit_loop_if_false (NULL_PTR,
6386 invert_truthvalue (TREE_OPERAND (exp, 0)));
6387 return const0_rtx;
6389 case LABELED_BLOCK_EXPR:
6390 if (LABELED_BLOCK_BODY (exp))
6391 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6392 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6393 return const0_rtx;
6395 case EXIT_BLOCK_EXPR:
6396 if (EXIT_BLOCK_RETURN (exp))
6397 sorry ("returned value in block_exit_expr");
6398 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6399 return const0_rtx;
6401 case LOOP_EXPR:
6402 push_temp_slots ();
6403 expand_start_loop (1);
6404 expand_expr_stmt (TREE_OPERAND (exp, 0));
6405 expand_end_loop ();
6406 pop_temp_slots ();
6408 return const0_rtx;
6410 case BIND_EXPR:
6412 tree vars = TREE_OPERAND (exp, 0);
6413 int vars_need_expansion = 0;
6415 /* We need to open a binding contour here because, if there
6416 are any cleanups, they must be contained in it. */
6417 expand_start_bindings (2);
6419 /* Mark the corresponding BLOCK for output in its proper place. */
6420 if (TREE_OPERAND (exp, 2) != 0
6421 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6422 insert_block (TREE_OPERAND (exp, 2));
6424 /* If VARS have not yet been expanded, expand them now. */
6425 while (vars)
6427 if (DECL_RTL (vars) == 0)
6429 vars_need_expansion = 1;
6430 expand_decl (vars);
6432 expand_decl_init (vars);
6433 vars = TREE_CHAIN (vars);
6436 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6438 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6440 return temp;
6443 case RTL_EXPR:
6444 if (RTL_EXPR_SEQUENCE (exp))
6446 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6447 abort ();
6448 emit_insns (RTL_EXPR_SEQUENCE (exp));
6449 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6451 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6452 free_temps_for_rtl_expr (exp);
6453 return RTL_EXPR_RTL (exp);
6455 case CONSTRUCTOR:
6456 /* If we don't need the result, just ensure we evaluate any
6457 subexpressions. */
6458 if (ignore)
6460 tree elt;
6461 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6462 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6463 EXPAND_MEMORY_USE_BAD);
6464 return const0_rtx;
6467 /* All elts simple constants => refer to a constant in memory. But
6468 if this is a non-BLKmode mode, let it store a field at a time
6469 since that should make a CONST_INT or CONST_DOUBLE when we
6470 fold. Likewise, if we have a target we can use, it is best to
6471 store directly into the target unless the type is large enough
6472 that memcpy will be used. If we are making an initializer and
6473 all operands are constant, put it in memory as well. */
6474 else if ((TREE_STATIC (exp)
6475 && ((mode == BLKmode
6476 && ! (target != 0 && safe_from_p (target, exp, 1)))
6477 || TREE_ADDRESSABLE (exp)
6478 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6479 && (! MOVE_BY_PIECES_P
6480 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6481 TYPE_ALIGN (type)))
6482 && ! mostly_zeros_p (exp))))
6483 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6485 rtx constructor = output_constant_def (exp);
6487 if (modifier != EXPAND_CONST_ADDRESS
6488 && modifier != EXPAND_INITIALIZER
6489 && modifier != EXPAND_SUM
6490 && (! memory_address_p (GET_MODE (constructor),
6491 XEXP (constructor, 0))
6492 || (flag_force_addr
6493 && GET_CODE (XEXP (constructor, 0)) != REG)))
6494 constructor = change_address (constructor, VOIDmode,
6495 XEXP (constructor, 0));
6496 return constructor;
6499 else
6501 /* Handle calls that pass values in multiple non-contiguous
6502 locations. The Irix 6 ABI has examples of this. */
6503 if (target == 0 || ! safe_from_p (target, exp, 1)
6504 || GET_CODE (target) == PARALLEL)
6506 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6507 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6508 else
6509 target = assign_temp (type, 0, 1, 1);
6512 if (TREE_READONLY (exp))
6514 if (GET_CODE (target) == MEM)
6515 target = copy_rtx (target);
6517 RTX_UNCHANGING_P (target) = 1;
6520 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6521 int_size_in_bytes (TREE_TYPE (exp)));
6522 return target;
6525 case INDIRECT_REF:
6527 tree exp1 = TREE_OPERAND (exp, 0);
6528 tree index;
6529 tree string = string_constant (exp1, &index);
6531 /* Try to optimize reads from const strings. */
6532 if (string
6533 && TREE_CODE (string) == STRING_CST
6534 && TREE_CODE (index) == INTEGER_CST
6535 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6536 && GET_MODE_CLASS (mode) == MODE_INT
6537 && GET_MODE_SIZE (mode) == 1
6538 && modifier != EXPAND_MEMORY_USE_WO)
6539 return
6540 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
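/* For instance (illustration only), a read such as `*("foo" + 2)' reaches
this point with STRING == "foo" and INDEX == 2 and is reduced directly to
GEN_INT ('o') with no memory reference. */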
6542 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6543 op0 = memory_address (mode, op0);
6545 if (cfun && current_function_check_memory_usage
6546 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6548 enum memory_use_mode memory_usage;
6549 memory_usage = get_memory_usage_from_modifier (modifier);
6551 if (memory_usage != MEMORY_USE_DONT)
6553 in_check_memory_usage = 1;
6554 emit_library_call (chkr_check_addr_libfunc,
6555 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6556 Pmode, GEN_INT (int_size_in_bytes (type)),
6557 TYPE_MODE (sizetype),
6558 GEN_INT (memory_usage),
6559 TYPE_MODE (integer_type_node));
6560 in_check_memory_usage = 0;
6564 temp = gen_rtx_MEM (mode, op0);
6565 set_mem_attributes (temp, exp, 0);
6567 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6568 here, because, in C and C++, the fact that a location is accessed
6569 through a pointer to const does not mean that the value there can
6570 never change. Languages where it can never change should
6571 also set TREE_STATIC. */
6572 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6574 /* If we are writing to this object and its type is a record with
6575 readonly fields, we must mark it as readonly so it will
6576 conflict with readonly references to those fields. */
6577 if (modifier == EXPAND_MEMORY_USE_WO
6578 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6579 RTX_UNCHANGING_P (temp) = 1;
6581 return temp;
6584 case ARRAY_REF:
6585 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6586 abort ();
6589 tree array = TREE_OPERAND (exp, 0);
6590 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6591 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6592 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6593 HOST_WIDE_INT i;
6595 /* Optimize the special-case of a zero lower bound.
6597 We convert the low_bound to sizetype to avoid some problems
6598 with constant folding. (E.g. suppose the lower bound is 1,
6599 and its mode is QI. Without the conversion, (ARRAY
6600 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6601 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6603 if (! integer_zerop (low_bound))
6604 index = size_diffop (index, convert (sizetype, low_bound));
6606 /* Fold an expression like: "foo"[2].
6607 This is not done in fold so it won't happen inside &.
6608 Don't fold if this is for wide characters since it's too
6609 difficult to do correctly and this is a very rare case. */
6611 if (TREE_CODE (array) == STRING_CST
6612 && TREE_CODE (index) == INTEGER_CST
6613 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6614 && GET_MODE_CLASS (mode) == MODE_INT
6615 && GET_MODE_SIZE (mode) == 1)
6616 return
6617 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6619 /* If this is a constant index into a constant array,
6620 just get the value from the array. Handle both the cases when
6621 we have an explicit constructor and when our operand is a variable
6622 that was declared const. */
6624 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6625 && TREE_CODE (index) == INTEGER_CST
6626 && 0 > compare_tree_int (index,
6627 list_length (CONSTRUCTOR_ELTS
6628 (TREE_OPERAND (exp, 0)))))
6630 tree elem;
6632 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6633 i = TREE_INT_CST_LOW (index);
6634 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6637 if (elem)
6638 return expand_expr (fold (TREE_VALUE (elem)), target,
6639 tmode, ro_modifier);
6642 else if (optimize >= 1
6643 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6644 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6645 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6647 if (TREE_CODE (index) == INTEGER_CST)
6649 tree init = DECL_INITIAL (array);
6651 if (TREE_CODE (init) == CONSTRUCTOR)
6653 tree elem;
6655 for (elem = CONSTRUCTOR_ELTS (init);
6656 (elem
6657 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6658 elem = TREE_CHAIN (elem))
6661 if (elem)
6662 return expand_expr (fold (TREE_VALUE (elem)), target,
6663 tmode, ro_modifier);
6665 else if (TREE_CODE (init) == STRING_CST
6666 && 0 > compare_tree_int (index,
6667 TREE_STRING_LENGTH (init)))
6669 tree type = TREE_TYPE (TREE_TYPE (init));
6670 enum machine_mode mode = TYPE_MODE (type);
6672 if (GET_MODE_CLASS (mode) == MODE_INT
6673 && GET_MODE_SIZE (mode) == 1)
6674 return (GEN_INT
6675 (TREE_STRING_POINTER
6676 (init)[TREE_INT_CST_LOW (index)]));
6681 /* Fall through. */
6683 case COMPONENT_REF:
6684 case BIT_FIELD_REF:
6685 /* If the operand is a CONSTRUCTOR, we can just extract the
6686 appropriate field if it is present. Don't do this if we have
6687 already written the data since we want to refer to that copy
6688 and varasm.c assumes that's what we'll do. */
6689 if (code != ARRAY_REF
6690 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6691 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6693 tree elt;
6695 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6696 elt = TREE_CHAIN (elt))
6697 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6698 /* We can normally use the value of the field in the
6699 CONSTRUCTOR. However, if this is a bitfield in
6700 an integral mode that we can fit in a HOST_WIDE_INT,
6701 we must mask only the number of bits in the bitfield,
6702 since this is done implicitly by the constructor. If
6703 the bitfield does not meet either of those conditions,
6704 we can't do this optimization. */
6705 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6706 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6707 == MODE_INT)
6708 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6709 <= HOST_BITS_PER_WIDE_INT))))
6711 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6712 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6714 HOST_WIDE_INT bitsize
6715 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6717 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6719 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6720 op0 = expand_and (op0, op1, target);
6722 else
6724 enum machine_mode imode
6725 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6726 tree count
6727 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6728 0);
6730 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6731 target, 0);
6732 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6733 target, 0);
6737 return op0;
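/* Worked example (illustration only, hypothetical struct): given

struct { int f : 3; } c = { 5 };

extracting C.F from the CONSTRUCTOR takes the value 5 and, because the
field is a signed bitfield, shifts it left and then right by
GET_MODE_BITSIZE (imode) - 3 bits, yielding the sign-extended value -3.
For an unsigned 3-bit field the value would instead be masked with
(1 << 3) - 1 == 7. */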
6742 enum machine_mode mode1;
6743 HOST_WIDE_INT bitsize, bitpos;
6744 tree offset;
6745 int volatilep = 0;
6746 unsigned int alignment;
6747 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6748 &mode1, &unsignedp, &volatilep,
6749 &alignment);
6751 /* If we got back the original object, something is wrong. Perhaps
6752 we are evaluating an expression too early. In any event, don't
6753 infinitely recurse. */
6754 if (tem == exp)
6755 abort ();
6757 /* If TEM's type is a union of variable size, pass TARGET to the inner
6758 computation, since it will need a temporary and TARGET is known
6759 to be safe to use as one. This occurs in unchecked conversion in Ada. */
6761 op0 = expand_expr (tem,
6762 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6763 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6764 != INTEGER_CST)
6765 ? target : NULL_RTX),
6766 VOIDmode,
6767 (modifier == EXPAND_INITIALIZER
6768 || modifier == EXPAND_CONST_ADDRESS)
6769 ? modifier : EXPAND_NORMAL);
6771 /* If this is a constant, put it into a register if it is a
6772 legitimate constant and OFFSET is 0; otherwise put it into memory. */
6773 if (CONSTANT_P (op0))
6775 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6776 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6777 && offset == 0)
6778 op0 = force_reg (mode, op0);
6779 else
6780 op0 = validize_mem (force_const_mem (mode, op0));
6783 if (offset != 0)
6785 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6787 /* If this object is in a register, put it into memory.
6788 This case can't occur in C, but can in Ada if we have
6789 unchecked conversion of an expression from a scalar type to
6790 an array or record type. */
6791 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6792 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6794 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6796 mark_temp_addr_taken (memloc);
6797 emit_move_insn (memloc, op0);
6798 op0 = memloc;
6801 if (GET_CODE (op0) != MEM)
6802 abort ();
6804 if (GET_MODE (offset_rtx) != ptr_mode)
6806 #ifdef POINTERS_EXTEND_UNSIGNED
6807 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6808 #else
6809 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6810 #endif
6813 /* A constant address in OP0 can have VOIDmode; we must not try
6814 to call force_reg in that case, so avoid it. */
6815 if (GET_CODE (op0) == MEM
6816 && GET_MODE (op0) == BLKmode
6817 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6818 && bitsize != 0
6819 && (bitpos % bitsize) == 0
6820 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6821 && alignment == GET_MODE_ALIGNMENT (mode1))
6823 rtx temp = change_address (op0, mode1,
6824 plus_constant (XEXP (op0, 0),
6825 (bitpos /
6826 BITS_PER_UNIT)));
6827 if (GET_CODE (XEXP (temp, 0)) == REG)
6828 op0 = temp;
6829 else
6830 op0 = change_address (op0, mode1,
6831 force_reg (GET_MODE (XEXP (temp, 0)),
6832 XEXP (temp, 0)));
6833 bitpos = 0;
6836 op0 = change_address (op0, VOIDmode,
6837 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6838 force_reg (ptr_mode,
6839 offset_rtx)));
6842 /* Don't forget about volatility even if this is a bitfield. */
6843 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6845 op0 = copy_rtx (op0);
6846 MEM_VOLATILE_P (op0) = 1;
6849 /* Check the access. */
6850 if (cfun != 0 && current_function_check_memory_usage
6851 && GET_CODE (op0) == MEM)
6853 enum memory_use_mode memory_usage;
6854 memory_usage = get_memory_usage_from_modifier (modifier);
6856 if (memory_usage != MEMORY_USE_DONT)
6858 rtx to;
6859 int size;
6861 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6862 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6864 /* Check the access right of the pointer. */
6865 in_check_memory_usage = 1;
6866 if (size > BITS_PER_UNIT)
6867 emit_library_call (chkr_check_addr_libfunc,
6868 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6869 Pmode, GEN_INT (size / BITS_PER_UNIT),
6870 TYPE_MODE (sizetype),
6871 GEN_INT (memory_usage),
6872 TYPE_MODE (integer_type_node));
6873 in_check_memory_usage = 0;
6877 /* In cases where an aligned union has an unaligned object
6878 as a field, we might be extracting a BLKmode value from
6879 an integer-mode (e.g., SImode) object. Handle this case
6880 by doing the extract into an object as wide as the field
6881 (which we know to be the width of a basic mode), then
6882 storing into memory, and changing the mode to BLKmode.
6883 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6884 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6885 if (mode1 == VOIDmode
6886 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6887 || (modifier != EXPAND_CONST_ADDRESS
6888 && modifier != EXPAND_INITIALIZER
6889 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6890 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6891 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6892 /* If the field isn't aligned enough to fetch as a memref,
6893 fetch it as a bit field. */
6894 || (mode1 != BLKmode
6895 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6896 && ((TYPE_ALIGN (TREE_TYPE (tem))
6897 < GET_MODE_ALIGNMENT (mode))
6898 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6899 /* If the type and the field are a constant size and the
6900 size of the type isn't the same size as the bitfield,
6901 we must use bitfield operations. */
6902 || ((bitsize >= 0
6903 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6904 == INTEGER_CST)
6905 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6906 bitsize)))))
6907 || (modifier != EXPAND_CONST_ADDRESS
6908 && modifier != EXPAND_INITIALIZER
6909 && mode == BLKmode
6910 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6911 && (TYPE_ALIGN (type) > alignment
6912 || bitpos % TYPE_ALIGN (type) != 0)))
6914 enum machine_mode ext_mode = mode;
6916 if (ext_mode == BLKmode
6917 && ! (target != 0 && GET_CODE (op0) == MEM
6918 && GET_CODE (target) == MEM
6919 && bitpos % BITS_PER_UNIT == 0))
6920 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6922 if (ext_mode == BLKmode)
6924 /* In this case, BITPOS must start at a byte boundary and
6925 TARGET, if specified, must be a MEM. */
6926 if (GET_CODE (op0) != MEM
6927 || (target != 0 && GET_CODE (target) != MEM)
6928 || bitpos % BITS_PER_UNIT != 0)
6929 abort ();
6931 op0 = change_address (op0, VOIDmode,
6932 plus_constant (XEXP (op0, 0),
6933 bitpos / BITS_PER_UNIT));
6934 if (target == 0)
6935 target = assign_temp (type, 0, 1, 1);
6937 emit_block_move (target, op0,
6938 bitsize == -1 ? expr_size (exp)
6939 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6940 / BITS_PER_UNIT),
6941 BITS_PER_UNIT);
6943 return target;
6946 op0 = validize_mem (op0);
6948 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6949 mark_reg_pointer (XEXP (op0, 0), alignment);
6951 op0 = extract_bit_field (op0, bitsize, bitpos,
6952 unsignedp, target, ext_mode, ext_mode,
6953 alignment,
6954 int_size_in_bytes (TREE_TYPE (tem)));
6956 /* If the result is a record type and BITSIZE is narrower than
6957 the mode of OP0, an integral mode, and this is a big endian
6958 machine, we must put the field into the high-order bits. */
6959 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6960 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6961 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6962 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6963 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6964 - bitsize),
6965 op0, 1);
6967 if (mode == BLKmode)
6969 rtx new = assign_stack_temp (ext_mode,
6970 bitsize / BITS_PER_UNIT, 0);
6972 emit_move_insn (new, op0);
6973 op0 = copy_rtx (new);
6974 PUT_MODE (op0, BLKmode);
6975 MEM_SET_IN_STRUCT_P (op0, 1);
6978 return op0;
6981 /* If the result is BLKmode, use that to access the object
6982 now as well. */
6983 if (mode == BLKmode)
6984 mode1 = BLKmode;
6986 /* Get a reference to just this component. */
6987 if (modifier == EXPAND_CONST_ADDRESS
6988 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6990 rtx new = gen_rtx_MEM (mode1,
6991 plus_constant (XEXP (op0, 0),
6992 (bitpos / BITS_PER_UNIT)));
6994 MEM_COPY_ATTRIBUTES (new, op0);
6995 op0 = new;
6997 else
6998 op0 = change_address (op0, mode1,
6999 plus_constant (XEXP (op0, 0),
7000 (bitpos / BITS_PER_UNIT)));
7002 set_mem_attributes (op0, exp, 0);
7003 if (GET_CODE (XEXP (op0, 0)) == REG)
7004 mark_reg_pointer (XEXP (op0, 0), alignment);
7006 MEM_VOLATILE_P (op0) |= volatilep;
7007 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7008 || modifier == EXPAND_CONST_ADDRESS
7009 || modifier == EXPAND_INITIALIZER)
7010 return op0;
7011 else if (target == 0)
7012 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7014 convert_move (target, op0, unsignedp);
7015 return target;
7018 /* Intended for a reference to a buffer of a file-object in Pascal.
7019 But it's not certain that a special tree code will really be
7020 necessary for these. INDIRECT_REF might work for them. */
7021 case BUFFER_REF:
7022 abort ();
7024 case IN_EXPR:
7026 /* Pascal set IN expression.
7028 Algorithm:
7029 rlo = set_low - (set_low%bits_per_word);
7030 the_word = set [ (index - rlo)/bits_per_word ];
7031 bit_index = index % bits_per_word;
7032 bitmask = 1 << bit_index;
7033 return !!(the_word & bitmask); */
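/* Worked instance of the algorithm above (illustration only): with
bits_per_word == 8, set_low == 2 and index == 11, we get
rlo = 2 - (2 % 8) = 0, the_word = set[(11 - 0) / 8] = set[1],
bit_index = 11 % 8 = 3, and bitmask = 1 << 3 = 8. */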
7035 tree set = TREE_OPERAND (exp, 0);
7036 tree index = TREE_OPERAND (exp, 1);
7037 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7038 tree set_type = TREE_TYPE (set);
7039 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7040 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7041 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7042 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7043 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7044 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7045 rtx setaddr = XEXP (setval, 0);
7046 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7047 rtx rlow;
7048 rtx diff, quo, rem, addr, bit, result;
7050 preexpand_calls (exp);
7052 /* If domain is empty, answer is no. Likewise if index is constant
7053 and out of bounds. */
7054 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7055 && TREE_CODE (set_low_bound) == INTEGER_CST
7056 && tree_int_cst_lt (set_high_bound, set_low_bound))
7057 || (TREE_CODE (index) == INTEGER_CST
7058 && TREE_CODE (set_low_bound) == INTEGER_CST
7059 && tree_int_cst_lt (index, set_low_bound))
7060 || (TREE_CODE (set_high_bound) == INTEGER_CST
7061 && TREE_CODE (index) == INTEGER_CST
7062 && tree_int_cst_lt (set_high_bound, index))))
7063 return const0_rtx;
7065 if (target == 0)
7066 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7068 /* If we get here, we have to generate the code for both cases
7069 (in range and out of range). */
7071 op0 = gen_label_rtx ();
7072 op1 = gen_label_rtx ();
7074 if (! (GET_CODE (index_val) == CONST_INT
7075 && GET_CODE (lo_r) == CONST_INT))
7077 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7078 GET_MODE (index_val), iunsignedp, 0, op1);
7081 if (! (GET_CODE (index_val) == CONST_INT
7082 && GET_CODE (hi_r) == CONST_INT))
7084 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7085 GET_MODE (index_val), iunsignedp, 0, op1);
7088 /* Calculate the element number of bit zero in the first word
7089 of the set. */
7090 if (GET_CODE (lo_r) == CONST_INT)
7091 rlow = GEN_INT (INTVAL (lo_r)
7092 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7093 else
7094 rlow = expand_binop (index_mode, and_optab, lo_r,
7095 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7096 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7098 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7099 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7101 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7102 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7103 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7104 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7106 addr = memory_address (byte_mode,
7107 expand_binop (index_mode, add_optab, diff,
7108 setaddr, NULL_RTX, iunsignedp,
7109 OPTAB_LIB_WIDEN));
7111 /* Extract the bit we want to examine. */
7112 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7113 gen_rtx_MEM (byte_mode, addr),
7114 make_tree (TREE_TYPE (index), rem),
7115 NULL_RTX, 1);
7116 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7117 GET_MODE (target) == byte_mode ? target : 0,
7118 1, OPTAB_LIB_WIDEN);
7120 if (result != target)
7121 convert_move (target, result, 1);
7123 /* Output the code to handle the out-of-range case. */
7124 emit_jump (op0);
7125 emit_label (op1);
7126 emit_move_insn (target, const0_rtx);
7127 emit_label (op0);
7128 return target;
7131 case WITH_CLEANUP_EXPR:
7132 if (RTL_EXPR_RTL (exp) == 0)
7134 RTL_EXPR_RTL (exp)
7135 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7136 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7138 /* That's it for this cleanup. */
7139 TREE_OPERAND (exp, 2) = 0;
7141 return RTL_EXPR_RTL (exp);
7143 case CLEANUP_POINT_EXPR:
7145 /* Start a new binding layer that will keep track of all cleanup
7146 actions to be performed. */
7147 expand_start_bindings (2);
7149 target_temp_slot_level = temp_slot_level;
7151 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7152 /* If we're going to use this value, load it up now. */
7153 if (! ignore)
7154 op0 = force_not_mem (op0);
7155 preserve_temp_slots (op0);
7156 expand_end_bindings (NULL_TREE, 0, 0);
7158 return op0;
7160 case CALL_EXPR:
7161 /* Check for a built-in function. */
7162 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7163 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7164 == FUNCTION_DECL)
7165 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7167 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7168 == BUILT_IN_FRONTEND)
7169 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7170 else
7171 return expand_builtin (exp, target, subtarget, tmode, ignore);
7174 /* If this call was expanded already by preexpand_calls,
7175 just return the result we got. */
7176 if (CALL_EXPR_RTL (exp) != 0)
7177 return CALL_EXPR_RTL (exp);
7179 return expand_call (exp, target, ignore);
7181 case NON_LVALUE_EXPR:
7182 case NOP_EXPR:
7183 case CONVERT_EXPR:
7184 case REFERENCE_EXPR:
7185 if (TREE_OPERAND (exp, 0) == error_mark_node)
7186 return const0_rtx;
7188 if (TREE_CODE (type) == UNION_TYPE)
7190 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7192 /* If both input and output are BLKmode, this conversion
7193 isn't actually doing anything unless we need to make the
7194 alignment stricter. */
7195 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7196 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7197 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7198 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7199 modifier);
7201 if (target == 0)
7203 if (mode != BLKmode)
7204 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7205 else
7206 target = assign_temp (type, 0, 1, 1);
7209 if (GET_CODE (target) == MEM)
7210 /* Store data into beginning of memory target. */
7211 store_expr (TREE_OPERAND (exp, 0),
7212 change_address (target, TYPE_MODE (valtype), 0), 0);
7214 else if (GET_CODE (target) == REG)
7215 /* Store this field into a union of the proper type. */
7216 store_field (target,
7217 MIN ((int_size_in_bytes (TREE_TYPE
7218 (TREE_OPERAND (exp, 0)))
7219 * BITS_PER_UNIT),
7220 GET_MODE_BITSIZE (mode)),
7221 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7222 VOIDmode, 0, BITS_PER_UNIT,
7223 int_size_in_bytes (type), 0);
7224 else
7225 abort ();
7227 /* Return the entire union. */
7228 return target;
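/* Illustrative note (GNU C cast-to-union extension, not from the original
source): this path handles conversions to union types, e.g.

union u { int i; double d; };
int n = 3;
... (union u) n ...

The value of N is stored into the first bytes of a union-sized memory
target, or into a register holding the union, and the whole union is
returned. */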
7231 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7233 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7234 ro_modifier);
7236 /* If the signedness of the conversion differs and OP0 is
7237 a promoted SUBREG, clear that indication since we now
7238 have to do the proper extension. */
7239 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7240 && GET_CODE (op0) == SUBREG)
7241 SUBREG_PROMOTED_VAR_P (op0) = 0;
7243 return op0;
7246 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7247 if (GET_MODE (op0) == mode)
7248 return op0;
7250 /* If OP0 is a constant, just convert it into the proper mode. */
7251 if (CONSTANT_P (op0))
7252 return
7253 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7254 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7256 if (modifier == EXPAND_INITIALIZER)
7257 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7259 if (target == 0)
7260 return
7261 convert_to_mode (mode, op0,
7262 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7263 else
7264 convert_move (target, op0,
7265 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7266 return target;
7268 case PLUS_EXPR:
7269 /* We come here from MINUS_EXPR when the second operand is a
7270 constant. */
7271 plus_expr:
7272 this_optab = add_optab;
7274 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7275 something else, make sure we add the register to the constant and
7276 then to the other thing. This case can occur during strength
7277 reduction and doing it this way will produce better code if the
7278 frame pointer or argument pointer is eliminated.
7280 fold-const.c will ensure that the constant is always in the inner
7281 PLUS_EXPR, so the only case we need to do anything about is if
7282 sp, ap, or fp is our second argument, in which case we must swap
7283 the innermost first argument and our second argument. */
7285 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7286 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7287 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7288 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7289 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7290 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7292 tree t = TREE_OPERAND (exp, 1);
7294 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7295 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7298 /* If the result is to be ptr_mode and we are adding an integer to
7299 something, we might be forming a constant. So try to use
7300 plus_constant. If it produces a sum and we can't accept it,
7301 use force_operand. This allows P = &ARR[const] to generate
7302 efficient code on machines where a SYMBOL_REF is not a valid
7303 address.
7305 If this is an EXPAND_SUM call, always return the sum. */
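/* For example (illustration only), on a target where a SYMBOL_REF is not
by itself a valid address, `p = &arr[10]' with 4-byte elements can be
formed here with plus_constant as something like

(const (plus (symbol_ref "arr") (const_int 40)))

so that later address legitimization, rather than an explicit add insn,
decides how to materialize it. */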
7306 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7307 || mode == ptr_mode)
7309 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7310 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7311 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7313 rtx constant_part;
7315 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7316 EXPAND_SUM);
7317 /* Use immed_double_const to ensure that the constant is
7318 truncated according to the mode of OP1, then sign extended
7319 to a HOST_WIDE_INT. Using the constant directly can result
7320 in non-canonical RTL in a 64x32 cross compile. */
7321 constant_part
7322 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7323 (HOST_WIDE_INT) 0,
7324 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7325 op1 = plus_constant (op1, INTVAL (constant_part));
7326 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7327 op1 = force_operand (op1, target);
7328 return op1;
7331 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7332 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7333 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7335 rtx constant_part;
7337 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7338 EXPAND_SUM);
7339 if (! CONSTANT_P (op0))
7341 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7342 VOIDmode, modifier);
7343 /* Don't go to both_summands if modifier
7344 says it's not right to return a PLUS. */
7345 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7346 goto binop2;
7347 goto both_summands;
7349 /* Use immed_double_const to ensure that the constant is
7350 truncated according to the mode of OP1, then sign extended
7351 to a HOST_WIDE_INT. Using the constant directly can result
7352 in non-canonical RTL in a 64x32 cross compile. */
7353 constant_part
7354 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7355 (HOST_WIDE_INT) 0,
7356 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7357 op0 = plus_constant (op0, INTVAL (constant_part));
7358 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7359 op0 = force_operand (op0, target);
7360 return op0;
7364 /* No sense saving up arithmetic to be done
7365 if it's all in the wrong mode to form part of an address.
7366 And force_operand won't know whether to sign-extend or
7367 zero-extend. */
7368 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7369 || mode != ptr_mode)
7370 goto binop;
7372 preexpand_calls (exp);
7373 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7374 subtarget = 0;
7376 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7377 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7379 both_summands:
7380 /* Make sure any term that's a sum with a constant comes last. */
7381 if (GET_CODE (op0) == PLUS
7382 && CONSTANT_P (XEXP (op0, 1)))
7384 temp = op0;
7385 op0 = op1;
7386 op1 = temp;
7388 /* If adding to a sum including a constant,
7389 associate it to put the constant outside. */
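/* For instance, if OP0 is A and OP1 is (B + 8), the code below
combines A and B into the new OP0 and leaves the constant 8
as the new OP1. */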
7390 if (GET_CODE (op1) == PLUS
7391 && CONSTANT_P (XEXP (op1, 1)))
7393 rtx constant_term = const0_rtx;
7395 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7396 if (temp != 0)
7397 op0 = temp;
7398 /* Ensure that MULT comes first if there is one. */
7399 else if (GET_CODE (op0) == MULT)
7400 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7401 else
7402 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7404 /* Let's also eliminate constants from op0 if possible. */
7405 op0 = eliminate_constant_term (op0, &constant_term);
7407 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7408 their sum should be a constant. Form it into OP1, since the
7409 result we want will then be OP0 + OP1. */
7411 temp = simplify_binary_operation (PLUS, mode, constant_term,
7412 XEXP (op1, 1));
7413 if (temp != 0)
7414 op1 = temp;
7415 else
7416 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7419 /* Put a constant term last and put a multiplication first. */
7420 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7421 temp = op1, op1 = op0, op0 = temp;
7423 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7424 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7426 case MINUS_EXPR:
7427 /* For initializers, we are allowed to return a MINUS of two
7428 symbolic constants. Here we handle all cases when both operands
7429 are constant. */
7430 /* Handle difference of two symbolic constants,
7431 for the sake of an initializer. */
7432 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7433 && really_constant_p (TREE_OPERAND (exp, 0))
7434 && really_constant_p (TREE_OPERAND (exp, 1)))
7436 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7437 VOIDmode, ro_modifier);
7438 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7439 VOIDmode, ro_modifier);
7441 /* If the last operand is a CONST_INT, use plus_constant of
7442 the negated constant. Else make the MINUS. */
7443 if (GET_CODE (op1) == CONST_INT)
7444 return plus_constant (op0, - INTVAL (op1));
7445 else
7446 return gen_rtx_MINUS (mode, op0, op1);
7448 /* Convert A - const to A + (-const). */
7449 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7451 tree negated = fold (build1 (NEGATE_EXPR, type,
7452 TREE_OPERAND (exp, 1)));
7454 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7455 /* If we can't negate the constant in TYPE, leave it alone and
7456 expand_binop will negate it for us. We used to try to do it
7457 here in the signed version of TYPE, but that doesn't work
7458 on POINTER_TYPEs. */;
7459 else
7461 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7462 goto plus_expr;
7465 this_optab = sub_optab;
7466 goto binop;
7468 case MULT_EXPR:
7469 preexpand_calls (exp);
7470 /* If first operand is constant, swap them.
7471 Thus the following special case checks need only
7472 check the second operand. */
7473 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7475 register tree t1 = TREE_OPERAND (exp, 0);
7476 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7477 TREE_OPERAND (exp, 1) = t1;
7480 /* Attempt to return something suitable for generating an
7481 indexed address, for machines that support that. */
7483 if (modifier == EXPAND_SUM && mode == ptr_mode
7484 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7485 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7488 EXPAND_SUM);
7490 /* Apply distributive law if OP0 is x+c. */
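/* E.g. (X + 3) * 5 is returned as X * 5 + 15. */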
7491 if (GET_CODE (op0) == PLUS
7492 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7493 return
7494 gen_rtx_PLUS
7495 (mode,
7496 gen_rtx_MULT
7497 (mode, XEXP (op0, 0),
7498 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7499 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7500 * INTVAL (XEXP (op0, 1))));
7502 if (GET_CODE (op0) != REG)
7503 op0 = force_operand (op0, NULL_RTX);
7504 if (GET_CODE (op0) != REG)
7505 op0 = copy_to_mode_reg (mode, op0);
7507 return
7508 gen_rtx_MULT (mode, op0,
7509 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7512 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7513 subtarget = 0;
7515 /* Check for multiplying things that have been extended
7516 from a narrower type. If this machine supports multiplying
7517 in that narrower type with a result in the desired type,
7518 do it that way, and avoid the explicit type-conversion. */
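/* For example, if both operands were sign-extended from HImode and
SImode is the next wider mode, a target with an SImode handler for
smul_widen_optab can compute the SImode product directly from the
narrow HImode operands. */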
7519 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7520 && TREE_CODE (type) == INTEGER_TYPE
7521 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7522 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7523 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7524 && int_fits_type_p (TREE_OPERAND (exp, 1),
7525 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7526 /* Don't use a widening multiply if a shift will do. */
7527 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7528 > HOST_BITS_PER_WIDE_INT)
7529 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7531 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7532 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7534 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7535 /* If both operands are extended, they must either both
7536 be zero-extended or both be sign-extended. */
7537 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7539 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7541 enum machine_mode innermode
7542 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7543 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7544 ? smul_widen_optab : umul_widen_optab);
7545 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7546 ? umul_widen_optab : smul_widen_optab);
7547 if (mode == GET_MODE_WIDER_MODE (innermode))
7549 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7551 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7552 NULL_RTX, VOIDmode, 0);
7553 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7554 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7555 VOIDmode, 0);
7556 else
7557 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7558 NULL_RTX, VOIDmode, 0);
7559 goto binop2;
7561 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7562 && innermode == word_mode)
7564 rtx htem;
7565 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7566 NULL_RTX, VOIDmode, 0);
7567 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7568 op1 = convert_modes (innermode, mode,
7569 expand_expr (TREE_OPERAND (exp, 1),
7570 NULL_RTX, VOIDmode, 0),
7571 unsignedp);
7572 else
7573 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7574 NULL_RTX, VOIDmode, 0);
7575 temp = expand_binop (mode, other_optab, op0, op1, target,
7576 unsignedp, OPTAB_LIB_WIDEN);
7577 htem = expand_mult_highpart_adjust (innermode,
7578 gen_highpart (innermode, temp),
7579 op0, op1,
7580 gen_highpart (innermode, temp),
7581 unsignedp);
7582 emit_move_insn (gen_highpart (innermode, temp), htem);
7583 return temp;
7587 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7588 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7589 return expand_mult (mode, op0, op1, target, unsignedp);
7591 case TRUNC_DIV_EXPR:
7592 case FLOOR_DIV_EXPR:
7593 case CEIL_DIV_EXPR:
7594 case ROUND_DIV_EXPR:
7595 case EXACT_DIV_EXPR:
7596 preexpand_calls (exp);
7597 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7598 subtarget = 0;
7599 /* Possible optimization: compute the dividend with EXPAND_SUM;
7600 then, if the divisor is constant, we can optimize the case
7601 where some terms of the dividend have coefficients divisible by it. */
7602 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7603 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7604 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7606 case RDIV_EXPR:
7607 this_optab = flodiv_optab;
7608 goto binop;
7610 case TRUNC_MOD_EXPR:
7611 case FLOOR_MOD_EXPR:
7612 case CEIL_MOD_EXPR:
7613 case ROUND_MOD_EXPR:
7614 preexpand_calls (exp);
7615 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7616 subtarget = 0;
7617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7618 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7619 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7621 case FIX_ROUND_EXPR:
7622 case FIX_FLOOR_EXPR:
7623 case FIX_CEIL_EXPR:
7624 abort (); /* Not used for C. */
7626 case FIX_TRUNC_EXPR:
7627 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7628 if (target == 0)
7629 target = gen_reg_rtx (mode);
7630 expand_fix (target, op0, unsignedp);
7631 return target;
7633 case FLOAT_EXPR:
7634 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7635 if (target == 0)
7636 target = gen_reg_rtx (mode);
7637 /* expand_float can't figure out what to do if FROM has VOIDmode.
7638 So give it the correct mode. With -O, cse will optimize this. */
7639 if (GET_MODE (op0) == VOIDmode)
7640 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7641 op0);
7642 expand_float (target, op0,
7643 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7644 return target;
7646 case NEGATE_EXPR:
7647 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7648 temp = expand_unop (mode, neg_optab, op0, target, 0);
7649 if (temp == 0)
7650 abort ();
7651 return temp;
7653 case ABS_EXPR:
7654 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7656 /* Handle complex values specially. */
7657 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7658 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7659 return expand_complex_abs (mode, op0, target, unsignedp);
7661 /* Unsigned abs is simply the operand. Testing here means we don't
7662 risk generating incorrect code below. */
7663 if (TREE_UNSIGNED (type))
7664 return op0;
7666 return expand_abs (mode, op0, target,
7667 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7669 case MAX_EXPR:
7670 case MIN_EXPR:
7671 target = original_target;
7672 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7673 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7674 || GET_MODE (target) != mode
7675 || (GET_CODE (target) == REG
7676 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7677 target = gen_reg_rtx (mode);
7678 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7679 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7681 /* First try to do it with a special MIN or MAX instruction.
7682 If that does not win, use a conditional jump to select the proper
7683 value. */
7684 this_optab = (TREE_UNSIGNED (type)
7685 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7686 : (code == MIN_EXPR ? smin_optab : smax_optab));
7688 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7689 OPTAB_WIDEN);
7690 if (temp != 0)
7691 return temp;
7693 /* At this point, a MEM target is no longer useful; we will get better
7694 code without it. */
7696 if (GET_CODE (target) == MEM)
7697 target = gen_reg_rtx (mode);
7699 if (target != op0)
7700 emit_move_insn (target, op0);
7702 op0 = gen_label_rtx ();
7704 /* If this mode is an integer too wide to compare properly,
7705 compare word by word. Rely on cse to optimize constant cases. */
7706 if (GET_MODE_CLASS (mode) == MODE_INT
7707 && ! can_compare_p (GE, mode, ccp_jump))
7709 if (code == MAX_EXPR)
7710 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7711 target, op1, NULL_RTX, op0);
7712 else
7713 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7714 op1, target, NULL_RTX, op0);
7716 else
7718 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7719 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7720 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7721 op0);
7723 emit_move_insn (target, op1);
7724 emit_label (op0);
7725 return target;
7727 case BIT_NOT_EXPR:
7728 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7729 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7730 if (temp == 0)
7731 abort ();
7732 return temp;
7734 case FFS_EXPR:
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7736 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7737 if (temp == 0)
7738 abort ();
7739 return temp;
7741 /* ??? Can optimize bitwise operations with one arg constant.
7742 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7743 and (a bitwise1 b) bitwise2 b (etc)
7744 but that is probably not worthwhile. */
7746 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7747 boolean values when we want in all cases to compute both of them. In
7748 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7749 as actual zero-or-1 values and then bitwise anding. In cases where
7750 there cannot be any side effects, better code would be made by
7751 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7752 how to recognize those cases. */
7754 case TRUTH_AND_EXPR:
7755 case BIT_AND_EXPR:
7756 this_optab = and_optab;
7757 goto binop;
7759 case TRUTH_OR_EXPR:
7760 case BIT_IOR_EXPR:
7761 this_optab = ior_optab;
7762 goto binop;
7764 case TRUTH_XOR_EXPR:
7765 case BIT_XOR_EXPR:
7766 this_optab = xor_optab;
7767 goto binop;
7769 case LSHIFT_EXPR:
7770 case RSHIFT_EXPR:
7771 case LROTATE_EXPR:
7772 case RROTATE_EXPR:
7773 preexpand_calls (exp);
7774 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7775 subtarget = 0;
7776 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7777 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7778 unsignedp);
7780 /* Could determine the answer when only additive constants differ. Also,
7781 the addition of one can be handled by changing the condition. */
7782 case LT_EXPR:
7783 case LE_EXPR:
7784 case GT_EXPR:
7785 case GE_EXPR:
7786 case EQ_EXPR:
7787 case NE_EXPR:
7788 case UNORDERED_EXPR:
7789 case ORDERED_EXPR:
7790 case UNLT_EXPR:
7791 case UNLE_EXPR:
7792 case UNGT_EXPR:
7793 case UNGE_EXPR:
7794 case UNEQ_EXPR:
7795 preexpand_calls (exp);
7796 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7797 if (temp != 0)
7798 return temp;
7800 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7801 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7802 && original_target
7803 && GET_CODE (original_target) == REG
7804 && (GET_MODE (original_target)
7805 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7807 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7808 VOIDmode, 0);
7810 if (temp != original_target)
7811 temp = copy_to_reg (temp);
7813 op1 = gen_label_rtx ();
7814 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7815 GET_MODE (temp), unsignedp, 0, op1);
7816 emit_move_insn (temp, const1_rtx);
7817 emit_label (op1);
7818 return temp;
7821 /* If no set-flag instruction, must generate a conditional
7822 store into a temporary variable. Drop through
7823 and handle this like && and ||. */
7825 case TRUTH_ANDIF_EXPR:
7826 case TRUTH_ORIF_EXPR:
7827 if (! ignore
7828 && (target == 0 || ! safe_from_p (target, exp, 1)
7829 /* Make sure we don't have a hard reg (such as function's return
7830 value) live across basic blocks, if not optimizing. */
7831 || (!optimize && GET_CODE (target) == REG
7832 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7833 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7835 if (target)
7836 emit_clr_insn (target);
7838 op1 = gen_label_rtx ();
7839 jumpifnot (exp, op1);
7841 if (target)
7842 emit_0_to_1_insn (target);
7844 emit_label (op1);
7845 return ignore ? const0_rtx : target;
7847 case TRUTH_NOT_EXPR:
7848 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7849 /* The parser is careful to generate TRUTH_NOT_EXPR
7850 only with operands that are always zero or one. */
7851 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7852 target, 1, OPTAB_LIB_WIDEN);
7853 if (temp == 0)
7854 abort ();
7855 return temp;
7857 case COMPOUND_EXPR:
7858 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7859 emit_queue ();
7860 return expand_expr (TREE_OPERAND (exp, 1),
7861 (ignore ? const0_rtx : target),
7862 VOIDmode, 0);
7864 case COND_EXPR:
7865 /* If we would have a "singleton" (see below) were it not for a
7866 conversion in each arm, bring that conversion back out. */
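/* For example, X ? (int) (A + B) : (int) A is handled as
(int) (X ? A + B : A), so the singleton optimization below
still applies. */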
7867 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7868 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7869 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7870 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7872 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7873 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7875 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7876 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7877 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7878 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7879 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7880 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7881 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7882 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7883 return expand_expr (build1 (NOP_EXPR, type,
7884 build (COND_EXPR, TREE_TYPE (true),
7885 TREE_OPERAND (exp, 0),
7886 true, false)),
7887 target, tmode, modifier);
7891 /* Note that COND_EXPRs whose type is a structure or union
7892 are required to be constructed to contain assignments of
7893 a temporary variable, so that we can evaluate them here
7894 for side effect only. If type is void, we must do likewise. */
7896 /* If an arm of the branch requires a cleanup,
7897 only that cleanup is performed. */
7899 tree singleton = 0;
7900 tree binary_op = 0, unary_op = 0;
7902 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7903 convert it to our mode, if necessary. */
7904 if (integer_onep (TREE_OPERAND (exp, 1))
7905 && integer_zerop (TREE_OPERAND (exp, 2))
7906 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7908 if (ignore)
7910 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7911 ro_modifier);
7912 return const0_rtx;
7915 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7916 if (GET_MODE (op0) == mode)
7917 return op0;
7919 if (target == 0)
7920 target = gen_reg_rtx (mode);
7921 convert_move (target, op0, unsignedp);
7922 return target;
7925 /* Check for X ? A + B : A. If we have this, we can copy A to the
7926 output and conditionally add B. Similarly for unary operations.
7927 Don't do this if X has side-effects because those side effects
7928 might affect A or B and the "?" operation is a sequence point in
7929 ANSI. (operand_equal_p tests for side effects.) */
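/* In X ? A + B : A, the singleton is A and the binary op is A + B:
A is copied to the output unconditionally, and B is added only on
the branch where X is true. */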
7931 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7932 && operand_equal_p (TREE_OPERAND (exp, 2),
7933 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7934 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7935 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7936 && operand_equal_p (TREE_OPERAND (exp, 1),
7937 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7938 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7939 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7940 && operand_equal_p (TREE_OPERAND (exp, 2),
7941 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7942 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7943 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7944 && operand_equal_p (TREE_OPERAND (exp, 1),
7945 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7946 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7948 /* If we are not to produce a result, we have no target. Otherwise,
7949 if a target was specified use it; it will not be used as an
7950 intermediate target unless it is safe. If no target, use a
7951 temporary. */
7953 if (ignore)
7954 temp = 0;
7955 else if (original_target
7956 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7957 || (singleton && GET_CODE (original_target) == REG
7958 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7959 && original_target == var_rtx (singleton)))
7960 && GET_MODE (original_target) == mode
7961 #ifdef HAVE_conditional_move
7962 && (! can_conditionally_move_p (mode)
7963 || GET_CODE (original_target) == REG
7964 || TREE_ADDRESSABLE (type))
7965 #endif
7966 && ! (GET_CODE (original_target) == MEM
7967 && MEM_VOLATILE_P (original_target)))
7968 temp = original_target;
7969 else if (TREE_ADDRESSABLE (type))
7970 abort ();
7971 else
7972 temp = assign_temp (type, 0, 0, 1);
7974 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7975 do the test of X as a store-flag operation, do this as
7976 A + ((X != 0) << log C). Similarly for other simple binary
7977 operators. Only do for C == 1 if BRANCH_COST is low. */
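/* E.g. X ? A + 4 : A becomes A + ((X != 0) << 2) when the
comparison X can be done as a store-flag operation. */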
7978 if (temp && singleton && binary_op
7979 && (TREE_CODE (binary_op) == PLUS_EXPR
7980 || TREE_CODE (binary_op) == MINUS_EXPR
7981 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7982 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7983 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7984 : integer_onep (TREE_OPERAND (binary_op, 1)))
7985 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7987 rtx result;
7988 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7989 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7990 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7991 : xor_optab);
7993 /* If we had X ? A : A + 1, do this as A + (X == 0).
7995 We have to invert the truth value here and then put it
7996 back later if do_store_flag fails. We cannot simply copy
7997 TREE_OPERAND (exp, 0) to another variable and modify that
7998 because invert_truthvalue can modify the tree pointed to
7999 by its argument. */
8000 if (singleton == TREE_OPERAND (exp, 1))
8001 TREE_OPERAND (exp, 0)
8002 = invert_truthvalue (TREE_OPERAND (exp, 0));
8004 result = do_store_flag (TREE_OPERAND (exp, 0),
8005 (safe_from_p (temp, singleton, 1)
8006 ? temp : NULL_RTX),
8007 mode, BRANCH_COST <= 1);
8009 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8010 result = expand_shift (LSHIFT_EXPR, mode, result,
8011 build_int_2 (tree_log2
8012 (TREE_OPERAND
8013 (binary_op, 1)),
8015 (safe_from_p (temp, singleton, 1)
8016 ? temp : NULL_RTX), 0);
8018 if (result)
8020 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8021 return expand_binop (mode, boptab, op1, result, temp,
8022 unsignedp, OPTAB_LIB_WIDEN);
8024 else if (singleton == TREE_OPERAND (exp, 1))
8025 TREE_OPERAND (exp, 0)
8026 = invert_truthvalue (TREE_OPERAND (exp, 0));
8029 do_pending_stack_adjust ();
8030 NO_DEFER_POP;
8031 op0 = gen_label_rtx ();
8033 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8035 if (temp != 0)
8037 /* If the target conflicts with the other operand of the
8038 binary op, we can't use it. Also, we can't use the target
8039 if it is a hard register, because evaluating the condition
8040 might clobber it. */
8041 if ((binary_op
8042 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8043 || (GET_CODE (temp) == REG
8044 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8045 temp = gen_reg_rtx (mode);
8046 store_expr (singleton, temp, 0);
8048 else
8049 expand_expr (singleton,
8050 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8051 if (singleton == TREE_OPERAND (exp, 1))
8052 jumpif (TREE_OPERAND (exp, 0), op0);
8053 else
8054 jumpifnot (TREE_OPERAND (exp, 0), op0);
8056 start_cleanup_deferral ();
8057 if (binary_op && temp == 0)
8058 /* Just touch the other operand. */
8059 expand_expr (TREE_OPERAND (binary_op, 1),
8060 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8061 else if (binary_op)
8062 store_expr (build (TREE_CODE (binary_op), type,
8063 make_tree (type, temp),
8064 TREE_OPERAND (binary_op, 1)),
8065 temp, 0);
8066 else
8067 store_expr (build1 (TREE_CODE (unary_op), type,
8068 make_tree (type, temp)),
8069 temp, 0);
8070 op1 = op0;
8072 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8073 comparison operator. If we have one of these cases, set the
8074 output to A, branch on A (cse will merge these two references),
8075 then set the output to FOO. */
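/* For instance, A != 0 ? A : FOO stores A into the output, jumps to
the end when the comparison is true, and otherwise stores FOO. */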
8076 else if (temp
8077 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8078 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8079 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8080 TREE_OPERAND (exp, 1), 0)
8081 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8082 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8083 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8085 if (GET_CODE (temp) == REG
8086 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8087 temp = gen_reg_rtx (mode);
8088 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8089 jumpif (TREE_OPERAND (exp, 0), op0);
8091 start_cleanup_deferral ();
8092 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8093 op1 = op0;
8095 else if (temp
8096 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8097 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8098 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8099 TREE_OPERAND (exp, 2), 0)
8100 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8101 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8102 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8104 if (GET_CODE (temp) == REG
8105 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8106 temp = gen_reg_rtx (mode);
8107 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8108 jumpifnot (TREE_OPERAND (exp, 0), op0);
8110 start_cleanup_deferral ();
8111 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8112 op1 = op0;
8114 else
8116 op1 = gen_label_rtx ();
8117 jumpifnot (TREE_OPERAND (exp, 0), op0);
8119 start_cleanup_deferral ();
8121 /* One branch of the cond can be void, if it never returns. For
8122 example, A ? throw : E. */
8123 if (temp != 0
8124 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8125 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8126 else
8127 expand_expr (TREE_OPERAND (exp, 1),
8128 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8129 end_cleanup_deferral ();
8130 emit_queue ();
8131 emit_jump_insn (gen_jump (op1));
8132 emit_barrier ();
8133 emit_label (op0);
8134 start_cleanup_deferral ();
8135 if (temp != 0
8136 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8137 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8138 else
8139 expand_expr (TREE_OPERAND (exp, 2),
8140 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8143 end_cleanup_deferral ();
8145 emit_queue ();
8146 emit_label (op1);
8147 OK_DEFER_POP;
8149 return temp;
8152 case TARGET_EXPR:
8154 /* Something needs to be initialized, but we didn't know
8155 where that thing was when building the tree. For example,
8156 it could be the return value of a function, or a parameter
8157 to a function which is laid out on the stack, or a temporary
8158 variable which must be passed by reference.
8160 We guarantee that the expression will either be constructed
8161 or copied into our original target. */
8163 tree slot = TREE_OPERAND (exp, 0);
8164 tree cleanups = NULL_TREE;
8165 tree exp1;
8167 if (TREE_CODE (slot) != VAR_DECL)
8168 abort ();
8170 if (! ignore)
8171 target = original_target;
8173 /* Set this here so that if we get a target that refers to a
8174 register variable that's already been used, put_reg_into_stack
8175 knows that it should fix up those uses. */
8176 TREE_USED (slot) = 1;
8178 if (target == 0)
8180 if (DECL_RTL (slot) != 0)
8182 target = DECL_RTL (slot);
8183 /* If we have already expanded the slot, don't do
8184 it again. (mrs) */
8185 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8186 return target;
8188 else
8190 target = assign_temp (type, 2, 0, 1);
8191 /* All temp slots at this level must not conflict. */
8192 preserve_temp_slots (target);
8193 DECL_RTL (slot) = target;
8194 if (TREE_ADDRESSABLE (slot))
8195 put_var_into_stack (slot);
8197 /* Since SLOT is not known to the called function
8198 to belong to its stack frame, we must build an explicit
8199 cleanup. This case occurs when we must build up a reference
8200 to pass the reference as an argument. In this case,
8201 it is very likely that such a reference need not be
8202 built here. */
8204 if (TREE_OPERAND (exp, 2) == 0)
8205 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8206 cleanups = TREE_OPERAND (exp, 2);
8209 else
8211 /* This case does occur when expanding a parameter which
8212 needs to be constructed on the stack. The target
8213 is the actual stack address that we want to initialize.
8214 The function we call will perform the cleanup in this case. */
8216 /* If we have already assigned it space, use that space,
8217 not the target that we were passed in, as our target
8218 parameter is only a hint. */
8219 if (DECL_RTL (slot) != 0)
8221 target = DECL_RTL (slot);
8222 /* If we have already expanded the slot, don't do
8223 it again. (mrs) */
8224 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8225 return target;
8227 else
8229 DECL_RTL (slot) = target;
8230 /* If we must have an addressable slot, then make sure that
8231 the RTL that we just stored in slot is OK. */
8232 if (TREE_ADDRESSABLE (slot))
8233 put_var_into_stack (slot);
8237 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8238 /* Mark it as expanded. */
8239 TREE_OPERAND (exp, 1) = NULL_TREE;
8241 store_expr (exp1, target, 0);
8243 expand_decl_cleanup (NULL_TREE, cleanups);
8245 return target;
8248 case INIT_EXPR:
8250 tree lhs = TREE_OPERAND (exp, 0);
8251 tree rhs = TREE_OPERAND (exp, 1);
8252 tree noncopied_parts = 0;
8253 tree lhs_type = TREE_TYPE (lhs);
8255 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8256 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8257 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8258 TYPE_NONCOPIED_PARTS (lhs_type));
8259 while (noncopied_parts != 0)
8261 expand_assignment (TREE_VALUE (noncopied_parts),
8262 TREE_PURPOSE (noncopied_parts), 0, 0);
8263 noncopied_parts = TREE_CHAIN (noncopied_parts);
8265 return temp;
8268 case MODIFY_EXPR:
8270 /* If lhs is complex, expand calls in rhs before computing it.
8271 That's so we don't compute a pointer and save it over a call.
8272 If lhs is simple, compute it first so we can give it as a
8273 target if the rhs is just a call. This avoids an extra temp and copy
8274 and that prevents a partial-subsumption which makes bad code.
8275 Actually we could treat component_ref's of vars like vars. */
8277 tree lhs = TREE_OPERAND (exp, 0);
8278 tree rhs = TREE_OPERAND (exp, 1);
8279 tree noncopied_parts = 0;
8280 tree lhs_type = TREE_TYPE (lhs);
8282 temp = 0;
8284 if (TREE_CODE (lhs) != VAR_DECL
8285 && TREE_CODE (lhs) != RESULT_DECL
8286 && TREE_CODE (lhs) != PARM_DECL
8287 && ! (TREE_CODE (lhs) == INDIRECT_REF
8288 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8289 preexpand_calls (exp);
8291 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8292 of size 1. In this case, (unless we need the result of the
8293 assignment) we can do this more efficiently with a
8294 test followed by an assignment, if necessary.
8296 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8297 things change so we do, this code should be enhanced to
8298 support it. */
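/* For example, X.A |= Y.B (result unused) expands to
`if (Y.B) X.A = 1;' and X.A &= Y.B to `if (! Y.B) X.A = 0;'. */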
8299 if (ignore
8300 && TREE_CODE (lhs) == COMPONENT_REF
8301 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8302 || TREE_CODE (rhs) == BIT_AND_EXPR)
8303 && TREE_OPERAND (rhs, 0) == lhs
8304 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8305 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8306 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8308 rtx label = gen_label_rtx ();
8310 do_jump (TREE_OPERAND (rhs, 1),
8311 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8312 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8313 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8314 (TREE_CODE (rhs) == BIT_IOR_EXPR
8315 ? integer_one_node
8316 : integer_zero_node)),
8317 0, 0);
8318 do_pending_stack_adjust ();
8319 emit_label (label);
8320 return const0_rtx;
8323 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8324 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8325 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8326 TYPE_NONCOPIED_PARTS (lhs_type));
8328 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8329 while (noncopied_parts != 0)
8331 expand_assignment (TREE_PURPOSE (noncopied_parts),
8332 TREE_VALUE (noncopied_parts), 0, 0);
8333 noncopied_parts = TREE_CHAIN (noncopied_parts);
8335 return temp;
8338 case RETURN_EXPR:
8339 if (!TREE_OPERAND (exp, 0))
8340 expand_null_return ();
8341 else
8342 expand_return (TREE_OPERAND (exp, 0));
8343 return const0_rtx;
8345 case PREINCREMENT_EXPR:
8346 case PREDECREMENT_EXPR:
8347 return expand_increment (exp, 0, ignore);
8349 case POSTINCREMENT_EXPR:
8350 case POSTDECREMENT_EXPR:
8351 /* Faster to treat as pre-increment if result is not used. */
8352 return expand_increment (exp, ! ignore, ignore);
8354 case ADDR_EXPR:
8355 /* If nonzero, TEMP will be set to the address of something that might
8356 be a MEM corresponding to a stack slot. */
8357 temp = 0;
8359 /* Are we taking the address of a nested function? */
8360 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8361 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8362 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8363 && ! TREE_STATIC (exp))
8365 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8366 op0 = force_operand (op0, target);
8368 /* If we are taking the address of something erroneous, just
8369 return a zero. */
8370 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8371 return const0_rtx;
8372 else
8374 /* We make sure to pass const0_rtx down if we came in with
8375 ignore set, to avoid doing the cleanups twice. */
8376 op0 = expand_expr (TREE_OPERAND (exp, 0),
8377 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8378 (modifier == EXPAND_INITIALIZER
8379 ? modifier : EXPAND_CONST_ADDRESS));
8381 /* If we are going to ignore the result, OP0 will have been set
8382 to const0_rtx, so just return it. Don't get confused and
8383 think we are taking the address of the constant. */
8384 if (ignore)
8385 return op0;
8387 op0 = protect_from_queue (op0, 0);
8389 /* We would like the object in memory. If it is a constant, we can
8390 have it be statically allocated into memory. For a non-constant,
8391 we need to allocate some memory and store the value into it. */
8393 if (CONSTANT_P (op0))
8394 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8395 op0);
8396 else if (GET_CODE (op0) == MEM)
8398 mark_temp_addr_taken (op0);
8399 temp = XEXP (op0, 0);
8402 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8403 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8405 /* If this object is in a register, it must not
8406 be BLKmode. */
8407 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8408 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8410 mark_temp_addr_taken (memloc);
8411 emit_move_insn (memloc, op0);
8412 op0 = memloc;
8415 if (GET_CODE (op0) != MEM)
8416 abort ();
8418 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8420 temp = XEXP (op0, 0);
8421 #ifdef POINTERS_EXTEND_UNSIGNED
8422 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8423 && mode == ptr_mode)
8424 temp = convert_memory_address (ptr_mode, temp);
8425 #endif
8426 return temp;
8429 op0 = force_operand (XEXP (op0, 0), target);
8432 if (flag_force_addr && GET_CODE (op0) != REG)
8433 op0 = force_reg (Pmode, op0);
8435 if (GET_CODE (op0) == REG
8436 && ! REG_USERVAR_P (op0))
8437 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8439 /* If we might have had a temp slot, add an equivalent address
8440 for it. */
8441 if (temp != 0)
8442 update_temp_slot_address (temp, op0);
8444 #ifdef POINTERS_EXTEND_UNSIGNED
8445 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8446 && mode == ptr_mode)
8447 op0 = convert_memory_address (ptr_mode, op0);
8448 #endif
8450 return op0;
8452 case ENTRY_VALUE_EXPR:
8453 abort ();
8455 /* COMPLEX type for Extended Pascal & Fortran */
8456 case COMPLEX_EXPR:
8458 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8459 rtx insns;
8461 /* Get the rtx code of the operands. */
8462 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8463 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8465 if (! target)
8466 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8468 start_sequence ();
8470 /* Move the real (op0) and imaginary (op1) parts to their location. */
8471 emit_move_insn (gen_realpart (mode, target), op0);
8472 emit_move_insn (gen_imagpart (mode, target), op1);
8474 insns = get_insns ();
8475 end_sequence ();
8477 /* Complex construction should appear as a single unit. */
8478 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8479 each with a separate pseudo as destination.
8480 It's not correct for flow to treat them as a unit. */
8481 if (GET_CODE (target) != CONCAT)
8482 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8483 else
8484 emit_insns (insns);
8486 return target;
8489 case REALPART_EXPR:
8490 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8491 return gen_realpart (mode, op0);
8493 case IMAGPART_EXPR:
8494 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8495 return gen_imagpart (mode, op0);
8497 case CONJ_EXPR:
8499 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8500 rtx imag_t;
8501 rtx insns;
8503 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8505 if (! target)
8506 target = gen_reg_rtx (mode);
8508 start_sequence ();
8510 /* Store the realpart and the negated imagpart to target. */
8511 emit_move_insn (gen_realpart (partmode, target),
8512 gen_realpart (partmode, op0));
8514 imag_t = gen_imagpart (partmode, target);
8515 temp = expand_unop (partmode, neg_optab,
8516 gen_imagpart (partmode, op0), imag_t, 0);
8517 if (temp != imag_t)
8518 emit_move_insn (imag_t, temp);
8520 insns = get_insns ();
8521 end_sequence ();
8523 /* Conjugate should appear as a single unit.
8524 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8525 each with a separate pseudo as destination.
8526 It's not correct for flow to treat them as a unit. */
8527 if (GET_CODE (target) != CONCAT)
8528 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8529 else
8530 emit_insns (insns);
8532 return target;
8535 case TRY_CATCH_EXPR:
8537 tree handler = TREE_OPERAND (exp, 1);
8539 expand_eh_region_start ();
8541 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8543 expand_eh_region_end (handler);
8545 return op0;
8548 case TRY_FINALLY_EXPR:
8550 tree try_block = TREE_OPERAND (exp, 0);
8551 tree finally_block = TREE_OPERAND (exp, 1);
8552 rtx finally_label = gen_label_rtx ();
8553 rtx done_label = gen_label_rtx ();
8554 rtx return_link = gen_reg_rtx (Pmode);
8555 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8556 (tree) finally_label, (tree) return_link);
8557 TREE_SIDE_EFFECTS (cleanup) = 1;
8559 /* Start a new binding layer that will keep track of all cleanup
8560 actions to be performed. */
8561 expand_start_bindings (2);
8563 target_temp_slot_level = temp_slot_level;
8565 expand_decl_cleanup (NULL_TREE, cleanup);
8566 op0 = expand_expr (try_block, target, tmode, modifier);
8568 preserve_temp_slots (op0);
8569 expand_end_bindings (NULL_TREE, 0, 0);
8570 emit_jump (done_label);
8571 emit_label (finally_label);
8572 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8573 emit_indirect_jump (return_link);
8574 emit_label (done_label);
8575 return op0;
8578 case GOTO_SUBROUTINE_EXPR:
8580 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8581 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8582 rtx return_address = gen_label_rtx ();
8583 emit_move_insn (return_link,
8584 gen_rtx_LABEL_REF (Pmode, return_address));
8585 emit_jump (subr);
8586 emit_label (return_address);
8587 return const0_rtx;
8590 case POPDCC_EXPR:
8592 rtx dcc = get_dynamic_cleanup_chain ();
8593 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8594 return const0_rtx;
8597 case POPDHC_EXPR:
8599 rtx dhc = get_dynamic_handler_chain ();
8600 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8601 return const0_rtx;
8604 case VA_ARG_EXPR:
8605 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8607 default:
8608 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8611 /* Here to do an ordinary binary operator, generating an instruction
8612 from the optab already placed in `this_optab'. */
8613 binop:
8614 preexpand_calls (exp);
8615 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8616 subtarget = 0;
8617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8618 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8619 binop2:
8620 temp = expand_binop (mode, this_optab, op0, op1, target,
8621 unsignedp, OPTAB_LIB_WIDEN);
8622 if (temp == 0)
8623 abort ();
8624 return temp;
8627 /* Similar to expand_expr, except that we don't specify a target, target
8628 mode, or modifier and we return the alignment of the inner type. This is
8629 used in cases where it is not necessary to align the result to the
8630 alignment of its type as long as we know the alignment of the result, for
8631 example for comparisons of BLKmode values. */
8633 static rtx
8634 expand_expr_unaligned (exp, palign)
8635 register tree exp;
8636 unsigned int *palign;
8638 register rtx op0;
8639 tree type = TREE_TYPE (exp);
8640 register enum machine_mode mode = TYPE_MODE (type);
8642 /* Default the alignment we return to that of the type. */
8643 *palign = TYPE_ALIGN (type);
8645 /* The only case in which we do anything special is when the resulting mode
8646 is BLKmode. */
8647 if (mode != BLKmode)
8648 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8650 switch (TREE_CODE (exp))
8652 case CONVERT_EXPR:
8653 case NOP_EXPR:
8654 case NON_LVALUE_EXPR:
8655 /* Conversions between BLKmode values don't change the underlying
8656 alignment or value. */
8657 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8658 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8659 break;
8661 case ARRAY_REF:
8662 /* Much of the code for this case is copied directly from expand_expr.
8663 We need to duplicate it here because we will do something different
8664 in the fall-through case, so we need to handle the same exceptions
8665 it does. */
8667 tree array = TREE_OPERAND (exp, 0);
8668 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8669 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8670 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8671 HOST_WIDE_INT i;
8673 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8674 abort ();
8676 /* Optimize the special-case of a zero lower bound.
8678 We convert the low_bound to sizetype to avoid some problems
8679 with constant folding. (E.g. suppose the lower bound is 1,
8680 and its mode is QI. Without the conversion, (ARRAY
8681 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8682 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8684 if (! integer_zerop (low_bound))
8685 index = size_diffop (index, convert (sizetype, low_bound));
8687 /* If this is a constant index into a constant array,
8688 just get the value from the array. Handle both the cases when
8689 we have an explicit constructor and when our operand is a variable
8690 that was declared const. */
8692 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8693 && 0 > compare_tree_int (index,
8694 list_length (CONSTRUCTOR_ELTS
8695 (TREE_OPERAND (exp, 0)))))
8697 tree elem;
8699 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8700 i = TREE_INT_CST_LOW (index);
8701 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8704 if (elem)
8705 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8708 else if (optimize >= 1
8709 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8710 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8711 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8713 if (TREE_CODE (index) == INTEGER_CST)
8715 tree init = DECL_INITIAL (array);
8717 if (TREE_CODE (init) == CONSTRUCTOR)
8719 tree elem;
8721 for (elem = CONSTRUCTOR_ELTS (init);
8722 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8723 elem = TREE_CHAIN (elem))
8726 if (elem)
8727 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8728 palign);
8733 /* Fall through. */
8735 case COMPONENT_REF:
8736 case BIT_FIELD_REF:
8737 /* If the operand is a CONSTRUCTOR, we can just extract the
8738 appropriate field if it is present. Don't do this if we have
8739 already written the data since we want to refer to that copy
8740 and varasm.c assumes that's what we'll do. */
8741 if (TREE_CODE (exp) != ARRAY_REF
8742 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8743 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8745 tree elt;
8747 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8748 elt = TREE_CHAIN (elt))
8749 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8750 /* Note that unlike the case in expand_expr, we know this is
8751 BLKmode and hence not an integer. */
8752 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8756 enum machine_mode mode1;
8757 HOST_WIDE_INT bitsize, bitpos;
8758 tree offset;
8759 int volatilep = 0;
8760 unsigned int alignment;
8761 int unsignedp;
8762 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8763 &mode1, &unsignedp, &volatilep,
8764 &alignment);
8766 /* If we got back the original object, something is wrong. Perhaps
8767 we are evaluating an expression too early. In any event, don't
8768 infinitely recurse. */
8769 if (tem == exp)
8770 abort ();
8772 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8774 /* If this is a constant, put it into a register if it is a
8775 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8776 if (CONSTANT_P (op0))
8778 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8780 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8781 && offset == 0)
8782 op0 = force_reg (inner_mode, op0);
8783 else
8784 op0 = validize_mem (force_const_mem (inner_mode, op0));
8787 if (offset != 0)
8789 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8791 /* If this object is in a register, put it into memory.
8792 This case can't occur in C, but can in Ada if we have
8793 unchecked conversion of an expression from a scalar type to
8794 an array or record type. */
8795 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8796 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8798 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8800 mark_temp_addr_taken (memloc);
8801 emit_move_insn (memloc, op0);
8802 op0 = memloc;
8805 if (GET_CODE (op0) != MEM)
8806 abort ();
8808 if (GET_MODE (offset_rtx) != ptr_mode)
8810 #ifdef POINTERS_EXTEND_UNSIGNED
8811 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8812 #else
8813 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8814 #endif
8817 op0 = change_address (op0, VOIDmode,
8818 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8819 force_reg (ptr_mode,
8820 offset_rtx)));
8823 /* Don't forget about volatility even if this is a bitfield. */
8824 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8826 op0 = copy_rtx (op0);
8827 MEM_VOLATILE_P (op0) = 1;
8830 /* Check the access. */
8831 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8833 rtx to;
8834 int size;
8836 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8837 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8839 /* Check the access right of the pointer. */
8840 in_check_memory_usage = 1;
8841 if (size > BITS_PER_UNIT)
8842 emit_library_call (chkr_check_addr_libfunc,
8843 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8844 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8845 TYPE_MODE (sizetype),
8846 GEN_INT (MEMORY_USE_RO),
8847 TYPE_MODE (integer_type_node));
8848 in_check_memory_usage = 0;
8851 /* In cases where an aligned union has an unaligned object
8852 as a field, we might be extracting a BLKmode value from
8853 an integer-mode (e.g., SImode) object. Handle this case
8854 by doing the extract into an object as wide as the field
8855 (which we know to be the width of a basic mode), then
8856 storing into memory, and changing the mode to BLKmode.
8857 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8858 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8859 if (mode1 == VOIDmode
8860 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8861 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8862 && (TYPE_ALIGN (type) > alignment
8863 || bitpos % TYPE_ALIGN (type) != 0)))
8865 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8867 if (ext_mode == BLKmode)
8869 /* In this case, BITPOS must start at a byte boundary. */
8870 if (GET_CODE (op0) != MEM
8871 || bitpos % BITS_PER_UNIT != 0)
8872 abort ();
8874 op0 = change_address (op0, VOIDmode,
8875 plus_constant (XEXP (op0, 0),
8876 bitpos / BITS_PER_UNIT));
8878 else
8880 rtx new = assign_stack_temp (ext_mode,
8881 bitsize / BITS_PER_UNIT, 0);
8883 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8884 unsignedp, NULL_RTX, ext_mode,
8885 ext_mode, alignment,
8886 int_size_in_bytes (TREE_TYPE (tem)));
8888 /* If the result is a record type and BITSIZE is narrower than
8889 the mode of OP0, an integral mode, and this is a big endian
8890 machine, we must put the field into the high-order bits. */
8891 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8892 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8893 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8894 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8895 size_int (GET_MODE_BITSIZE
8896 (GET_MODE (op0))
8897 - bitsize),
8898 op0, 1);
8900 emit_move_insn (new, op0);
8901 op0 = copy_rtx (new);
8902 PUT_MODE (op0, BLKmode);
8905 else
8906 /* Get a reference to just this component. */
8907 op0 = change_address (op0, mode1,
8908 plus_constant (XEXP (op0, 0),
8909 (bitpos / BITS_PER_UNIT)));
8911 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8913 /* Adjust the alignment in case the bit position is not
8914 a multiple of the alignment of the inner object. */
8915 while (bitpos % alignment != 0)
8916 alignment >>= 1;
8918 if (GET_CODE (XEXP (op0, 0)) == REG)
8919 mark_reg_pointer (XEXP (op0, 0), alignment);
8921 MEM_IN_STRUCT_P (op0) = 1;
8922 MEM_VOLATILE_P (op0) |= volatilep;
8924 *palign = alignment;
8925 return op0;
8928 default:
8929 break;
8933 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8936 /* Return the tree node if ARG corresponds to a string constant, or zero
8937 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8938 in bytes within the string that ARG is accessing. The type of the
8939 offset will be `sizetype'. */
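/* For example, if ARG is a PLUS_EXPR whose first operand is the address
of a STRING_CST and whose second operand is the constant 2, the
STRING_CST is returned and *PTR_OFFSET is set to 2. */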
8941 tree
8942 string_constant (arg, ptr_offset)
8943 tree arg;
8944 tree *ptr_offset;
8946 STRIP_NOPS (arg);
8948 if (TREE_CODE (arg) == ADDR_EXPR
8949 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8951 *ptr_offset = size_zero_node;
8952 return TREE_OPERAND (arg, 0);
8954 else if (TREE_CODE (arg) == PLUS_EXPR)
8956 tree arg0 = TREE_OPERAND (arg, 0);
8957 tree arg1 = TREE_OPERAND (arg, 1);
8959 STRIP_NOPS (arg0);
8960 STRIP_NOPS (arg1);
8962 if (TREE_CODE (arg0) == ADDR_EXPR
8963 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8965 *ptr_offset = convert (sizetype, arg1);
8966 return TREE_OPERAND (arg0, 0);
8968 else if (TREE_CODE (arg1) == ADDR_EXPR
8969 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8971 *ptr_offset = convert (sizetype, arg0);
8972 return TREE_OPERAND (arg1, 0);
8976 return 0;
8979 /* Expand code for a post- or pre- increment or decrement
8980 and return the RTX for the result.
8981 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8983 static rtx
8984 expand_increment (exp, post, ignore)
8985 register tree exp;
8986 int post, ignore;
8988 register rtx op0, op1;
8989 register rtx temp, value;
8990 register tree incremented = TREE_OPERAND (exp, 0);
8991 optab this_optab = add_optab;
8992 int icode;
8993 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8994 int op0_is_copy = 0;
8995 int single_insn = 0;
8996 /* 1 means we can't store into OP0 directly,
8997 because it is a subreg narrower than a word,
8998 and we don't dare clobber the rest of the word. */
8999 int bad_subreg = 0;
9001 /* Stabilize any component ref that might need to be
9002 evaluated more than once below. */
9003 if (!post
9004 || TREE_CODE (incremented) == BIT_FIELD_REF
9005 || (TREE_CODE (incremented) == COMPONENT_REF
9006 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9007 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9008 incremented = stabilize_reference (incremented);
9009 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9010 ones into save exprs so that they don't accidentally get evaluated
9011 more than once by the code below. */
9012 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9013 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9014 incremented = save_expr (incremented);
9016 /* Compute the operands as RTX.
9017 Note whether OP0 is the actual lvalue or a copy of it:
9018 I believe it is a copy iff it is a register or subreg
9019 and insns were generated in computing it. */
9021 temp = get_last_insn ();
9022 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9024 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9025 in place but instead must do sign- or zero-extension during assignment,
9026 so we copy it into a new register and let the code below use it as
9027 a copy.
9029 Note that we can safely modify this SUBREG since it is known not to be
9030 shared (it was made by the expand_expr call above). */
9032 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9034 if (post)
9035 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9036 else
9037 bad_subreg = 1;
9039 else if (GET_CODE (op0) == SUBREG
9040 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9042 /* We cannot increment this SUBREG in place. If we are
9043 post-incrementing, get a copy of the old value. Otherwise,
9044 just mark that we cannot increment in place. */
9045 if (post)
9046 op0 = copy_to_reg (op0);
9047 else
9048 bad_subreg = 1;
9051 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9052 && temp != get_last_insn ());
9053 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9054 EXPAND_MEMORY_USE_BAD);
9056 /* Decide whether incrementing or decrementing. */
9057 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9058 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9059 this_optab = sub_optab;
9061 /* Convert decrement by a constant into a negative increment. */
9062 if (this_optab == sub_optab
9063 && GET_CODE (op1) == CONST_INT)
9065 op1 = GEN_INT (-INTVAL (op1));
9066 this_optab = add_optab;
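/* For example (illustrative): for `p--' where `*p' is 4 bytes wide, OP1
   arrives as the constant 4; it becomes -4 and the update is then done with
   add_optab instead of sub_optab.  */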
9069 /* For a preincrement, see if we can do this with a single instruction. */
9070 if (!post)
9072 icode = (int) this_optab->handlers[(int) mode].insn_code;
9073 if (icode != (int) CODE_FOR_nothing
9074 /* Make sure that OP0 is valid for operands 0 and 1
9075 of the insn we want to queue. */
9076 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9077 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9078 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9079 single_insn = 1;
9082 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9083 then we cannot just increment OP0. We must therefore contrive to
9084 increment the original value. Then, for postincrement, we can return
9085 OP0 since it is a copy of the old value. For preincrement, expand here
9086 unless we can do it with a single insn.
9088 Likewise if storing directly into OP0 would clobber high bits
9089 we need to preserve (bad_subreg). */
9090 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9092 /* This is the easiest way to increment the value wherever it is.
9093 Problems with multiple evaluation of INCREMENTED are prevented
9094 because either (1) it is a component_ref or preincrement,
9095 in which case it was stabilized above, or (2) it is an array_ref
9096 with constant index in an array in a register, which is
9097 safe to reevaluate. */
9098 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9099 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9100 ? MINUS_EXPR : PLUS_EXPR),
9101 TREE_TYPE (exp),
9102 incremented,
9103 TREE_OPERAND (exp, 1));
9105 while (TREE_CODE (incremented) == NOP_EXPR
9106 || TREE_CODE (incremented) == CONVERT_EXPR)
9108 newexp = convert (TREE_TYPE (incremented), newexp);
9109 incremented = TREE_OPERAND (incremented, 0);
9112 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9113 return post ? op0 : temp;
9116 if (post)
9118 /* We have a true reference to the value in OP0.
9119 If there is an insn to add or subtract in this mode, queue it.
9120 Queueing the increment insn avoids the register shuffling
9121 that often results if we must increment now and first save
9122 the old value for subsequent use. */
9124 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9125 op0 = stabilize (op0);
9126 #endif
9128 icode = (int) this_optab->handlers[(int) mode].insn_code;
9129 if (icode != (int) CODE_FOR_nothing
9130 /* Make sure that OP0 is valid for operands 0 and 1
9131 of the insn we want to queue. */
9132 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9133 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9135 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9136 op1 = force_reg (mode, op1);
9138 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9140 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9142 rtx addr = (general_operand (XEXP (op0, 0), mode)
9143 ? force_reg (Pmode, XEXP (op0, 0))
9144 : copy_to_reg (XEXP (op0, 0)));
9145 rtx temp, result;
9147 op0 = change_address (op0, VOIDmode, addr);
9148 temp = force_reg (GET_MODE (op0), op0);
9149 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9150 op1 = force_reg (mode, op1);
9152 /* The increment queue is LIFO, thus we have to `queue'
9153 the instructions in reverse order. */
9154 enqueue_insn (op0, gen_move_insn (op0, temp));
9155 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9156 return result;
9160 /* Preincrement, or we can't increment with one simple insn. */
9161 if (post)
9162 /* Save a copy of the value before inc or dec, to return it later. */
9163 temp = value = copy_to_reg (op0);
9164 else
9165 /* Arrange to return the incremented value. */
9166 /* Copy the rtx because expand_binop will protect from the queue,
9167 and the results of that would be invalid for us to return
9168 if our caller does emit_queue before using our result. */
9169 temp = copy_rtx (value = op0);
9171 /* Increment however we can. */
9172 op1 = expand_binop (mode, this_optab, value, op1,
9173 current_function_check_memory_usage ? NULL_RTX : op0,
9174 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9175 /* Make sure the value is stored into OP0. */
9176 if (op1 != op0)
9177 emit_move_insn (op0, op1);
9179 return temp;
9182 /* Expand all function calls contained within EXP, innermost ones first.
9183 But don't look within expressions that have sequence points.
9184 For each CALL_EXPR, record the rtx for its value
9185 in the CALL_EXPR_RTL field. */
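/* For example (illustrative): given `a = f (1) + g (2)', both calls are
   expanded and their result rtx recorded here before any code for the
   addition is emitted.  Calls hidden behind sequence points, such as the
   second operand of `&&', are deliberately left alone (see below).  */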
9187 static void
9188 preexpand_calls (exp)
9189 tree exp;
9191 register int nops, i;
9192 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9194 if (! do_preexpand_calls)
9195 return;
9197 /* Only expressions and references can contain calls. */
9199 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9200 return;
9202 switch (TREE_CODE (exp))
9204 case CALL_EXPR:
9205 /* Do nothing if already expanded. */
9206 if (CALL_EXPR_RTL (exp) != 0
9207 /* Do nothing if the call returns a variable-sized object. */
9208 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9209 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9210 /* Do nothing to built-in functions. */
9211 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9212 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9213 == FUNCTION_DECL)
9214 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9215 return;
9217 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9218 return;
9220 case COMPOUND_EXPR:
9221 case COND_EXPR:
9222 case TRUTH_ANDIF_EXPR:
9223 case TRUTH_ORIF_EXPR:
9224 /* If we find one of these, then we can be sure
9225 the adjust will be done for it (since it makes jumps).
9226 Do it now, so that if this is inside an argument
9227 of a function, we don't get the stack adjustment
9228 after some other args have already been pushed. */
9229 do_pending_stack_adjust ();
9230 return;
9232 case BLOCK:
9233 case RTL_EXPR:
9234 case WITH_CLEANUP_EXPR:
9235 case CLEANUP_POINT_EXPR:
9236 case TRY_CATCH_EXPR:
9237 return;
9239 case SAVE_EXPR:
9240 if (SAVE_EXPR_RTL (exp) != 0)
9241 return;
9243 default:
9244 break;
9247 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9248 for (i = 0; i < nops; i++)
9249 if (TREE_OPERAND (exp, i) != 0)
9251 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9252 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9253 It doesn't happen before the call is made. */
9254 ;
9255 else
9257 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9258 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9259 preexpand_calls (TREE_OPERAND (exp, i));
9264 /* At the start of a function, record that we have no previously-pushed
9265 arguments waiting to be popped. */
9267 void
9268 init_pending_stack_adjust ()
9270 pending_stack_adjust = 0;
9273 /* When exiting from function, if safe, clear out any pending stack adjust
9274 so the adjustment won't get done.
9276 Note, if the current function calls alloca, then it must have a
9277 frame pointer regardless of the value of flag_omit_frame_pointer. */
9279 void
9280 clear_pending_stack_adjust ()
9282 #ifdef EXIT_IGNORE_STACK
9283 if (optimize > 0
9284 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9285 && EXIT_IGNORE_STACK
9286 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9287 && ! flag_inline_functions)
9289 stack_pointer_delta -= pending_stack_adjust,
9290 pending_stack_adjust = 0;
9292 #endif
9295 /* Pop any previously-pushed arguments that have not been popped yet. */
9297 void
9298 do_pending_stack_adjust ()
9300 if (inhibit_defer_pop == 0)
9302 if (pending_stack_adjust != 0)
9303 adjust_stack (GEN_INT (pending_stack_adjust));
9304 pending_stack_adjust = 0;
9308 /* Expand conditional expressions. */
9310 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9311 LABEL is an rtx of code CODE_LABEL, in this function and all the
9312 functions here. */
9314 void
9315 jumpifnot (exp, label)
9316 tree exp;
9317 rtx label;
9319 do_jump (exp, label, NULL_RTX);
9322 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9324 void
9325 jumpif (exp, label)
9326 tree exp;
9327 rtx label;
9329 do_jump (exp, NULL_RTX, label);
9332 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9333 the result is zero, or IF_TRUE_LABEL if the result is one.
9334 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9335 meaning fall through in that case.
9337 do_jump always does any pending stack adjust except when it does not
9338 actually perform a jump. An example where there is no jump
9339 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9341 This function is responsible for optimizing cases such as
9342 &&, || and comparison operators in EXP. */
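/* For example (illustrative): for `if (a && b)', the TRUTH_ANDIF_EXPR case
   below jumps to the false label as soon as `a' is found to be zero and only
   then evaluates `b'; no boolean value is ever materialized in a register.  */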
9344 void
9345 do_jump (exp, if_false_label, if_true_label)
9346 tree exp;
9347 rtx if_false_label, if_true_label;
9349 register enum tree_code code = TREE_CODE (exp);
9350 /* Some cases need to create a label to jump to
9351 in order to properly fall through.
9352 These cases set DROP_THROUGH_LABEL nonzero. */
9353 rtx drop_through_label = 0;
9354 rtx temp;
9355 int i;
9356 tree type;
9357 enum machine_mode mode;
9359 #ifdef MAX_INTEGER_COMPUTATION_MODE
9360 check_max_integer_computation_mode (exp);
9361 #endif
9363 emit_queue ();
9365 switch (code)
9367 case ERROR_MARK:
9368 break;
9370 case INTEGER_CST:
9371 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9372 if (temp)
9373 emit_jump (temp);
9374 break;
9376 #if 0
9377 /* This is not true with #pragma weak */
9378 case ADDR_EXPR:
9379 /* The address of something can never be zero. */
9380 if (if_true_label)
9381 emit_jump (if_true_label);
9382 break;
9383 #endif
9385 case NOP_EXPR:
9386 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9387 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9388 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9389 goto normal;
9390 case CONVERT_EXPR:
9391 /* If we are narrowing the operand, we have to do the compare in the
9392 narrower mode. */
9393 if ((TYPE_PRECISION (TREE_TYPE (exp))
9394 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9395 goto normal;
9396 case NON_LVALUE_EXPR:
9397 case REFERENCE_EXPR:
9398 case ABS_EXPR:
9399 case NEGATE_EXPR:
9400 case LROTATE_EXPR:
9401 case RROTATE_EXPR:
9402 /* These cannot change zero->non-zero or vice versa. */
9403 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9404 break;
9406 case WITH_RECORD_EXPR:
9407 /* Put the object on the placeholder list, recurse through our first
9408 operand, and pop the list. */
9409 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9410 placeholder_list);
9411 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9412 placeholder_list = TREE_CHAIN (placeholder_list);
9413 break;
9415 #if 0
9416 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9417 a test and can be longer if the test is eliminated. */
9418 case PLUS_EXPR:
9419 /* Reduce to minus. */
9420 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9421 TREE_OPERAND (exp, 0),
9422 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9423 TREE_OPERAND (exp, 1))));
9424 /* Process as MINUS. */
9425 #endif
9427 case MINUS_EXPR:
9428 /* Non-zero iff operands of minus differ. */
9429 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9430 TREE_OPERAND (exp, 0),
9431 TREE_OPERAND (exp, 1)),
9432 NE, NE, if_false_label, if_true_label);
9433 break;
9435 case BIT_AND_EXPR:
9436 /* If we are AND'ing with a small constant, do this comparison in the
9437 smallest type that fits. If the machine doesn't have comparisons
9438 that small, it will be converted back to the wider comparison.
9439 This helps if we are testing the sign bit of a narrower object.
9440 combine can't do this for us because it can't know whether a
9441 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
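/* Illustrative case: testing `x & 0x80' where `x' is a 32-bit int.
   tree_floor_log2 returns 7, so the test fits in an 8-bit type and, when the
   target has a QImode compare and byte access is not slow, it is done as an
   unsigned char comparison.  */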
9443 if (! SLOW_BYTE_ACCESS
9444 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9445 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9446 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9447 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9448 && (type = type_for_mode (mode, 1)) != 0
9449 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9450 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9451 != CODE_FOR_nothing))
9453 do_jump (convert (type, exp), if_false_label, if_true_label);
9454 break;
9456 goto normal;
9458 case TRUTH_NOT_EXPR:
9459 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9460 break;
9462 case TRUTH_ANDIF_EXPR:
9463 if (if_false_label == 0)
9464 if_false_label = drop_through_label = gen_label_rtx ();
9465 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9466 start_cleanup_deferral ();
9467 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9468 end_cleanup_deferral ();
9469 break;
9471 case TRUTH_ORIF_EXPR:
9472 if (if_true_label == 0)
9473 if_true_label = drop_through_label = gen_label_rtx ();
9474 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9475 start_cleanup_deferral ();
9476 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9477 end_cleanup_deferral ();
9478 break;
9480 case COMPOUND_EXPR:
9481 push_temp_slots ();
9482 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9483 preserve_temp_slots (NULL_RTX);
9484 free_temp_slots ();
9485 pop_temp_slots ();
9486 emit_queue ();
9487 do_pending_stack_adjust ();
9488 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9489 break;
9491 case COMPONENT_REF:
9492 case BIT_FIELD_REF:
9493 case ARRAY_REF:
9495 HOST_WIDE_INT bitsize, bitpos;
9496 int unsignedp;
9497 enum machine_mode mode;
9498 tree type;
9499 tree offset;
9500 int volatilep = 0;
9501 unsigned int alignment;
9503 /* Get description of this reference. We don't actually care
9504 about the underlying object here. */
9505 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9506 &unsignedp, &volatilep, &alignment);
9508 type = type_for_size (bitsize, unsignedp);
9509 if (! SLOW_BYTE_ACCESS
9510 && type != 0 && bitsize >= 0
9511 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9512 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9513 != CODE_FOR_nothing))
9515 do_jump (convert (type, exp), if_false_label, if_true_label);
9516 break;
9518 goto normal;
9521 case COND_EXPR:
9522 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9523 if (integer_onep (TREE_OPERAND (exp, 1))
9524 && integer_zerop (TREE_OPERAND (exp, 2)))
9525 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9527 else if (integer_zerop (TREE_OPERAND (exp, 1))
9528 && integer_onep (TREE_OPERAND (exp, 2)))
9529 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9531 else
9533 register rtx label1 = gen_label_rtx ();
9534 drop_through_label = gen_label_rtx ();
9536 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9538 start_cleanup_deferral ();
9539 /* Now the THEN-expression. */
9540 do_jump (TREE_OPERAND (exp, 1),
9541 if_false_label ? if_false_label : drop_through_label,
9542 if_true_label ? if_true_label : drop_through_label);
9543 /* In case the do_jump just above never jumps. */
9544 do_pending_stack_adjust ();
9545 emit_label (label1);
9547 /* Now the ELSE-expression. */
9548 do_jump (TREE_OPERAND (exp, 2),
9549 if_false_label ? if_false_label : drop_through_label,
9550 if_true_label ? if_true_label : drop_through_label);
9551 end_cleanup_deferral ();
9553 break;
9555 case EQ_EXPR:
9557 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9559 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9560 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9562 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9563 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9564 do_jump
9565 (fold
9566 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9567 fold (build (EQ_EXPR, TREE_TYPE (exp),
9568 fold (build1 (REALPART_EXPR,
9569 TREE_TYPE (inner_type),
9570 exp0)),
9571 fold (build1 (REALPART_EXPR,
9572 TREE_TYPE (inner_type),
9573 exp1)))),
9574 fold (build (EQ_EXPR, TREE_TYPE (exp),
9575 fold (build1 (IMAGPART_EXPR,
9576 TREE_TYPE (inner_type),
9577 exp0)),
9578 fold (build1 (IMAGPART_EXPR,
9579 TREE_TYPE (inner_type),
9580 exp1)))))),
9581 if_false_label, if_true_label);
9584 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9585 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9587 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9588 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9589 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9590 else
9591 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9592 break;
9595 case NE_EXPR:
9597 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9599 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9600 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9602 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9603 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9604 do_jump
9605 (fold
9606 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9607 fold (build (NE_EXPR, TREE_TYPE (exp),
9608 fold (build1 (REALPART_EXPR,
9609 TREE_TYPE (inner_type),
9610 exp0)),
9611 fold (build1 (REALPART_EXPR,
9612 TREE_TYPE (inner_type),
9613 exp1)))),
9614 fold (build (NE_EXPR, TREE_TYPE (exp),
9615 fold (build1 (IMAGPART_EXPR,
9616 TREE_TYPE (inner_type),
9617 exp0)),
9618 fold (build1 (IMAGPART_EXPR,
9619 TREE_TYPE (inner_type),
9620 exp1)))))),
9621 if_false_label, if_true_label);
9624 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9625 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9627 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9628 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9629 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9630 else
9631 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9632 break;
9635 case LT_EXPR:
9636 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9637 if (GET_MODE_CLASS (mode) == MODE_INT
9638 && ! can_compare_p (LT, mode, ccp_jump))
9639 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9640 else
9641 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9642 break;
9644 case LE_EXPR:
9645 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9646 if (GET_MODE_CLASS (mode) == MODE_INT
9647 && ! can_compare_p (LE, mode, ccp_jump))
9648 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9649 else
9650 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9651 break;
9653 case GT_EXPR:
9654 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9655 if (GET_MODE_CLASS (mode) == MODE_INT
9656 && ! can_compare_p (GT, mode, ccp_jump))
9657 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9658 else
9659 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9660 break;
9662 case GE_EXPR:
9663 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9664 if (GET_MODE_CLASS (mode) == MODE_INT
9665 && ! can_compare_p (GE, mode, ccp_jump))
9666 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9667 else
9668 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9669 break;
9671 case UNORDERED_EXPR:
9672 case ORDERED_EXPR:
9674 enum rtx_code cmp, rcmp;
9675 int do_rev;
9677 if (code == UNORDERED_EXPR)
9678 cmp = UNORDERED, rcmp = ORDERED;
9679 else
9680 cmp = ORDERED, rcmp = UNORDERED;
9681 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9683 do_rev = 0;
9684 if (! can_compare_p (cmp, mode, ccp_jump)
9685 && (can_compare_p (rcmp, mode, ccp_jump)
9686 /* If the target doesn't provide either UNORDERED or ORDERED
9687 comparisons, canonicalize on UNORDERED for the library. */
9688 || rcmp == UNORDERED))
9689 do_rev = 1;
9691 if (! do_rev)
9692 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9693 else
9694 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9696 break;
9699 enum rtx_code rcode1;
9700 enum tree_code tcode2;
9702 case UNLT_EXPR:
9703 rcode1 = UNLT;
9704 tcode2 = LT_EXPR;
9705 goto unordered_bcc;
9706 case UNLE_EXPR:
9707 rcode1 = UNLE;
9708 tcode2 = LE_EXPR;
9709 goto unordered_bcc;
9710 case UNGT_EXPR:
9711 rcode1 = UNGT;
9712 tcode2 = GT_EXPR;
9713 goto unordered_bcc;
9714 case UNGE_EXPR:
9715 rcode1 = UNGE;
9716 tcode2 = GE_EXPR;
9717 goto unordered_bcc;
9718 case UNEQ_EXPR:
9719 rcode1 = UNEQ;
9720 tcode2 = EQ_EXPR;
9721 goto unordered_bcc;
9723 unordered_bcc:
9724 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9725 if (can_compare_p (rcode1, mode, ccp_jump))
9726 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9727 if_true_label);
9728 else
9730 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9731 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9732 tree cmp0, cmp1;
9734 /* If the target doesn't support combined unordered
9735 compares, decompose into UNORDERED + comparison. */
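/* E.g. (illustrative) an UNLT test of `a' and `b' becomes
   `UNORDERED (a, b) || a < b', which is then handled by the
   TRUTH_ORIF_EXPR case above.  */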
9736 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9737 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9738 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9739 do_jump (exp, if_false_label, if_true_label);
9742 break;
9744 default:
9745 normal:
9746 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9747 #if 0
9748 /* This is no longer needed and produces poor code, since it causes
9749 comparisons and tests from non-SI objects to have different code
9750 sequences. */
9751 /* Copy to register to avoid generating bad insns by cse
9752 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9753 if (!cse_not_expected && GET_CODE (temp) == MEM)
9754 temp = copy_to_reg (temp);
9755 #endif
9756 do_pending_stack_adjust ();
9757 /* Do any postincrements in the expression that was tested. */
9758 emit_queue ();
9760 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9762 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9763 if (target)
9764 emit_jump (target);
9766 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9767 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9768 /* Note swapping the labels gives us not-equal. */
9769 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9770 else if (GET_MODE (temp) != VOIDmode)
9771 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9772 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9773 GET_MODE (temp), NULL_RTX, 0,
9774 if_false_label, if_true_label);
9775 else
9776 abort ();
9779 if (drop_through_label)
9781 /* If do_jump produces code that might be jumped around,
9782 do any stack adjusts from that code, before the place
9783 where control merges in. */
9784 do_pending_stack_adjust ();
9785 emit_label (drop_through_label);
9789 /* Given a comparison expression EXP for values too wide to be compared
9790 with one insn, test the comparison and jump to the appropriate label.
9791 The code of EXP is ignored; we always test GT if SWAP is 0,
9792 and LT if SWAP is 1. */
9794 static void
9795 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9796 tree exp;
9797 int swap;
9798 rtx if_false_label, if_true_label;
9800 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9801 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9802 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9803 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9805 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9808 /* Compare OP0 with OP1, word at a time, in mode MODE.
9809 UNSIGNEDP says to do unsigned comparison.
9810 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
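/* For example (illustrative): a DImode comparison on a 32-bit target is done
   below one word_mode piece at a time, high-order word first; the high words
   decide the result unless they are equal, in which case the low words are
   compared as unsigned, since only the high word carries the sign.  */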
9812 void
9813 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9814 enum machine_mode mode;
9815 int unsignedp;
9816 rtx op0, op1;
9817 rtx if_false_label, if_true_label;
9819 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9820 rtx drop_through_label = 0;
9821 int i;
9823 if (! if_true_label || ! if_false_label)
9824 drop_through_label = gen_label_rtx ();
9825 if (! if_true_label)
9826 if_true_label = drop_through_label;
9827 if (! if_false_label)
9828 if_false_label = drop_through_label;
9830 /* Compare a word at a time, high order first. */
9831 for (i = 0; i < nwords; i++)
9833 rtx op0_word, op1_word;
9835 if (WORDS_BIG_ENDIAN)
9837 op0_word = operand_subword_force (op0, i, mode);
9838 op1_word = operand_subword_force (op1, i, mode);
9840 else
9842 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9843 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9846 /* All but high-order word must be compared as unsigned. */
9847 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9848 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9849 NULL_RTX, if_true_label);
9851 /* Consider lower words only if these are equal. */
9852 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9853 NULL_RTX, 0, NULL_RTX, if_false_label);
9856 if (if_false_label)
9857 emit_jump (if_false_label);
9858 if (drop_through_label)
9859 emit_label (drop_through_label);
9862 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9863 with one insn, test the comparison and jump to the appropriate label. */
9865 static void
9866 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9867 tree exp;
9868 rtx if_false_label, if_true_label;
9870 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9871 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9872 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9873 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9874 int i;
9875 rtx drop_through_label = 0;
9877 if (! if_false_label)
9878 drop_through_label = if_false_label = gen_label_rtx ();
9880 for (i = 0; i < nwords; i++)
9881 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9882 operand_subword_force (op1, i, mode),
9883 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9884 word_mode, NULL_RTX, 0, if_false_label,
9885 NULL_RTX);
9887 if (if_true_label)
9888 emit_jump (if_true_label);
9889 if (drop_through_label)
9890 emit_label (drop_through_label);
9893 /* Jump according to whether OP0 is 0.
9894 We assume that OP0 has an integer mode that is too wide
9895 for the available compare insns. */
9897 void
9898 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9899 rtx op0;
9900 rtx if_false_label, if_true_label;
9902 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9903 rtx part;
9904 int i;
9905 rtx drop_through_label = 0;
9907 /* The fastest way of doing this comparison on almost any machine is to
9908 "or" all the words and compare the result. If all have to be loaded
9909 from memory and this is a very wide item, it's possible this may
9910 be slower, but that's highly unlikely. */
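/* For example (illustrative): a 64-bit operand on a 32-bit target is handled
   by OR-ing its two 32-bit words into one register and comparing that single
   word against zero.  */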
9912 part = gen_reg_rtx (word_mode);
9913 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9914 for (i = 1; i < nwords && part != 0; i++)
9915 part = expand_binop (word_mode, ior_optab, part,
9916 operand_subword_force (op0, i, GET_MODE (op0)),
9917 part, 1, OPTAB_WIDEN);
9919 if (part != 0)
9921 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9922 NULL_RTX, 0, if_false_label, if_true_label);
9924 return;
9927 /* If we couldn't do the "or" simply, do this with a series of compares. */
9928 if (! if_false_label)
9929 drop_through_label = if_false_label = gen_label_rtx ();
9931 for (i = 0; i < nwords; i++)
9932 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9933 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9934 if_false_label, NULL_RTX);
9936 if (if_true_label)
9937 emit_jump (if_true_label);
9939 if (drop_through_label)
9940 emit_label (drop_through_label);
9943 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9944 (including code to compute the values to be compared)
9945 and set (CC0) according to the result.
9946 The decision as to signed or unsigned comparison must be made by the caller.
9948 We force a stack adjustment unless there are currently
9949 things pushed on the stack that aren't yet used.
9951 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9952 compared.
9954 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9955 size of MODE should be used. */
9957 rtx
9958 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9959 register rtx op0, op1;
9960 enum rtx_code code;
9961 int unsignedp;
9962 enum machine_mode mode;
9963 rtx size;
9964 unsigned int align;
9966 rtx tem;
9968 /* If one operand is constant, make it the second one. Only do this
9969 if the other operand is not constant as well. */
9971 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9972 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9974 tem = op0;
9975 op0 = op1;
9976 op1 = tem;
9977 code = swap_condition (code);
9980 if (flag_force_mem)
9982 op0 = force_not_mem (op0);
9983 op1 = force_not_mem (op1);
9986 do_pending_stack_adjust ();
9988 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9989 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9990 return tem;
9992 #if 0
9993 /* There's no need to do this now that combine.c can eliminate lots of
9994 sign extensions. This can be less efficient in certain cases on other
9995 machines. */
9997 /* If this is a signed equality comparison, we can do it as an
9998 unsigned comparison since zero-extension is cheaper than sign
9999 extension and comparisons with zero are done as unsigned. This is
10000 the case even on machines that can do fast sign extension, since
10001 zero-extension is easier to combine with other operations than
10002 sign-extension is. If we are comparing against a constant, we must
10003 convert it to what it would look like unsigned. */
10004 if ((code == EQ || code == NE) && ! unsignedp
10005 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10007 if (GET_CODE (op1) == CONST_INT
10008 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10009 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10010 unsignedp = 1;
10012 #endif
10014 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10016 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10019 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10020 The decision as to signed or unsigned comparison must be made by the caller.
10022 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10023 compared.
10025 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10026 size of MODE should be used. */
10028 void
10029 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10030 if_false_label, if_true_label)
10031 register rtx op0, op1;
10032 enum rtx_code code;
10033 int unsignedp;
10034 enum machine_mode mode;
10035 rtx size;
10036 unsigned int align;
10037 rtx if_false_label, if_true_label;
10039 rtx tem;
10040 int dummy_true_label = 0;
10042 /* Reverse the comparison if that is safe and we want to jump if it is
10043 false. */
10044 if (! if_true_label && ! FLOAT_MODE_P (mode))
10046 if_true_label = if_false_label;
10047 if_false_label = 0;
10048 code = reverse_condition (code);
10051 /* If one operand is constant, make it the second one. Only do this
10052 if the other operand is not constant as well. */
10054 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10055 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10057 tem = op0;
10058 op0 = op1;
10059 op1 = tem;
10060 code = swap_condition (code);
10063 if (flag_force_mem)
10065 op0 = force_not_mem (op0);
10066 op1 = force_not_mem (op1);
10069 do_pending_stack_adjust ();
10071 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10072 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10074 if (tem == const_true_rtx)
10076 if (if_true_label)
10077 emit_jump (if_true_label);
10079 else
10081 if (if_false_label)
10082 emit_jump (if_false_label);
10084 return;
10087 #if 0
10088 /* There's no need to do this now that combine.c can eliminate lots of
10089 sign extensions. This can be less efficient in certain cases on other
10090 machines. */
10092 /* If this is a signed equality comparison, we can do it as an
10093 unsigned comparison since zero-extension is cheaper than sign
10094 extension and comparisons with zero are done as unsigned. This is
10095 the case even on machines that can do fast sign extension, since
10096 zero-extension is easier to combine with other operations than
10097 sign-extension is. If we are comparing against a constant, we must
10098 convert it to what it would look like unsigned. */
10099 if ((code == EQ || code == NE) && ! unsignedp
10100 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10102 if (GET_CODE (op1) == CONST_INT
10103 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10104 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10105 unsignedp = 1;
10107 #endif
10109 if (! if_true_label)
10111 dummy_true_label = 1;
10112 if_true_label = gen_label_rtx ();
10115 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10116 if_true_label);
10118 if (if_false_label)
10119 emit_jump (if_false_label);
10120 if (dummy_true_label)
10121 emit_label (if_true_label);
10124 /* Generate code for a comparison expression EXP (including code to compute
10125 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10126 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10127 generated code will drop through.
10128 SIGNED_CODE should be the rtx operation for this comparison for
10129 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10131 We force a stack adjustment unless there are currently
10132 things pushed on the stack that aren't yet used. */
10134 static void
10135 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10136 if_true_label)
10137 register tree exp;
10138 enum rtx_code signed_code, unsigned_code;
10139 rtx if_false_label, if_true_label;
10141 unsigned int align0, align1;
10142 register rtx op0, op1;
10143 register tree type;
10144 register enum machine_mode mode;
10145 int unsignedp;
10146 enum rtx_code code;
10148 /* Don't crash if the comparison was erroneous. */
10149 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10150 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10151 return;
10153 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10154 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10155 mode = TYPE_MODE (type);
10156 unsignedp = TREE_UNSIGNED (type);
10157 code = unsignedp ? unsigned_code : signed_code;
10159 #ifdef HAVE_canonicalize_funcptr_for_compare
10160 /* If function pointers need to be "canonicalized" before they can
10161 be reliably compared, then canonicalize them. */
10162 if (HAVE_canonicalize_funcptr_for_compare
10163 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10164 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10165 == FUNCTION_TYPE))
10167 rtx new_op0 = gen_reg_rtx (mode);
10169 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10170 op0 = new_op0;
10173 if (HAVE_canonicalize_funcptr_for_compare
10174 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10175 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10176 == FUNCTION_TYPE))
10178 rtx new_op1 = gen_reg_rtx (mode);
10180 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10181 op1 = new_op1;
10183 #endif
10185 /* Do any postincrements in the expression that was tested. */
10186 emit_queue ();
10188 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10189 ((mode == BLKmode)
10190 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10191 MIN (align0, align1),
10192 if_false_label, if_true_label);
10195 /* Generate code to calculate EXP using a store-flag instruction
10196 and return an rtx for the result. EXP is either a comparison
10197 or a TRUTH_NOT_EXPR whose operand is a comparison.
10199 If TARGET is nonzero, store the result there if convenient.
10201 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10202 cheap.
10204 Return zero if there is no suitable set-flag instruction
10205 available on this machine.
10207 Once expand_expr has been called on the arguments of the comparison,
10208 we are committed to doing the store flag, since it is not safe to
10209 re-evaluate the expression. We emit the store-flag insn by calling
10210 emit_store_flag, but only expand the arguments if we have a reason
10211 to believe that emit_store_flag will be successful. If we think that
10212 it will, but it isn't, we have to simulate the store-flag with a
10213 set/jump/set sequence. */
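/* For example (illustrative): `flag = (a < b);' can often be compiled to a
   compare followed by a single set-on-condition (scc) instruction instead of
   a compare, a conditional branch and two moves.  */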
10215 static rtx
10216 do_store_flag (exp, target, mode, only_cheap)
10217 tree exp;
10218 rtx target;
10219 enum machine_mode mode;
10220 int only_cheap;
10222 enum rtx_code code;
10223 tree arg0, arg1, type;
10224 tree tem;
10225 enum machine_mode operand_mode;
10226 int invert = 0;
10227 int unsignedp;
10228 rtx op0, op1;
10229 enum insn_code icode;
10230 rtx subtarget = target;
10231 rtx result, label;
10233 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10234 result at the end. We can't simply invert the test since it would
10235 have already been inverted if it were valid. This case occurs for
10236 some floating-point comparisons. */
10238 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10239 invert = 1, exp = TREE_OPERAND (exp, 0);
10241 arg0 = TREE_OPERAND (exp, 0);
10242 arg1 = TREE_OPERAND (exp, 1);
10243 type = TREE_TYPE (arg0);
10244 operand_mode = TYPE_MODE (type);
10245 unsignedp = TREE_UNSIGNED (type);
10247 /* We won't bother with BLKmode store-flag operations because it would mean
10248 passing a lot of information to emit_store_flag. */
10249 if (operand_mode == BLKmode)
10250 return 0;
10252 /* We won't bother with store-flag operations involving function pointers
10253 when function pointers must be canonicalized before comparisons. */
10254 #ifdef HAVE_canonicalize_funcptr_for_compare
10255 if (HAVE_canonicalize_funcptr_for_compare
10256 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10257 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10258 == FUNCTION_TYPE))
10259 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10260 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10261 == FUNCTION_TYPE))))
10262 return 0;
10263 #endif
10265 STRIP_NOPS (arg0);
10266 STRIP_NOPS (arg1);
10268 /* Get the rtx comparison code to use. We know that EXP is a comparison
10269 operation of some type. Some comparisons against 1 and -1 can be
10270 converted to comparisons with zero. Do so here so that the tests
10271 below will be aware that we have a comparison with zero. These
10272 tests will not catch constants in the first operand, but constants
10273 are rarely passed as the first operand. */
10275 switch (TREE_CODE (exp))
10277 case EQ_EXPR:
10278 code = EQ;
10279 break;
10280 case NE_EXPR:
10281 code = NE;
10282 break;
10283 case LT_EXPR:
10284 if (integer_onep (arg1))
10285 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10286 else
10287 code = unsignedp ? LTU : LT;
10288 break;
10289 case LE_EXPR:
10290 if (! unsignedp && integer_all_onesp (arg1))
10291 arg1 = integer_zero_node, code = LT;
10292 else
10293 code = unsignedp ? LEU : LE;
10294 break;
10295 case GT_EXPR:
10296 if (! unsignedp && integer_all_onesp (arg1))
10297 arg1 = integer_zero_node, code = GE;
10298 else
10299 code = unsignedp ? GTU : GT;
10300 break;
10301 case GE_EXPR:
10302 if (integer_onep (arg1))
10303 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10304 else
10305 code = unsignedp ? GEU : GE;
10306 break;
10308 case UNORDERED_EXPR:
10309 code = UNORDERED;
10310 break;
10311 case ORDERED_EXPR:
10312 code = ORDERED;
10313 break;
10314 case UNLT_EXPR:
10315 code = UNLT;
10316 break;
10317 case UNLE_EXPR:
10318 code = UNLE;
10319 break;
10320 case UNGT_EXPR:
10321 code = UNGT;
10322 break;
10323 case UNGE_EXPR:
10324 code = UNGE;
10325 break;
10326 case UNEQ_EXPR:
10327 code = UNEQ;
10328 break;
10330 default:
10331 abort ();
10334 /* Put a constant second. */
10335 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10337 tem = arg0; arg0 = arg1; arg1 = tem;
10338 code = swap_condition (code);
10341 /* If this is an equality or inequality test of a single bit, we can
10342 do this by shifting the bit being tested to the low-order bit and
10343 masking the result with the constant 1. If the condition was EQ,
10344 we xor it with 1. This does not require an scc insn and is faster
10345 than an scc insn even if we have it. */
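/* Illustrative case: `(x & 4) != 0' becomes `(x >> 2) & 1', and
   `(x & 4) == 0' additionally XORs that result with 1; the final AND can be
   omitted only when the bit tested is the sign bit.  */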
10347 if ((code == NE || code == EQ)
10348 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10349 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10351 tree inner = TREE_OPERAND (arg0, 0);
10352 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10353 int ops_unsignedp;
10355 /* If INNER is a right shift by a constant and it plus BITNUM does
10356 not overflow, adjust BITNUM and INNER. */
10358 if (TREE_CODE (inner) == RSHIFT_EXPR
10359 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10360 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10361 && bitnum < TYPE_PRECISION (type)
10362 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10363 bitnum - TYPE_PRECISION (type)))
10365 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10366 inner = TREE_OPERAND (inner, 0);
10369 /* If we are going to be able to omit the AND below, we must do our
10370 operations as unsigned. If we must use the AND, we have a choice.
10371 Normally unsigned is faster, but for some machines signed is. */
10372 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10373 #ifdef LOAD_EXTEND_OP
10374 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10375 #else
10376 : 1
10377 #endif
10378 );
10380 if (! get_subtarget (subtarget)
10381 || GET_MODE (subtarget) != operand_mode
10382 || ! safe_from_p (subtarget, inner, 1))
10383 subtarget = 0;
10385 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10387 if (bitnum != 0)
10388 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10389 size_int (bitnum), subtarget, ops_unsignedp);
10391 if (GET_MODE (op0) != mode)
10392 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10394 if ((code == EQ && ! invert) || (code == NE && invert))
10395 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10396 ops_unsignedp, OPTAB_LIB_WIDEN);
10398 /* Put the AND last so it can combine with more things. */
10399 if (bitnum != TYPE_PRECISION (type) - 1)
10400 op0 = expand_and (op0, const1_rtx, subtarget);
10402 return op0;
10405 /* Now see if we are likely to be able to do this. Return if not. */
10406 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10407 return 0;
10409 icode = setcc_gen_code[(int) code];
10410 if (icode == CODE_FOR_nothing
10411 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10413 /* We can only do this if it is one of the special cases that
10414 can be handled without an scc insn. */
10415 if ((code == LT && integer_zerop (arg1))
10416 || (! only_cheap && code == GE && integer_zerop (arg1)))
10417 ;
10418 else if (BRANCH_COST >= 0
10419 && ! only_cheap && (code == NE || code == EQ)
10420 && TREE_CODE (type) != REAL_TYPE
10421 && ((abs_optab->handlers[(int) operand_mode].insn_code
10422 != CODE_FOR_nothing)
10423 || (ffs_optab->handlers[(int) operand_mode].insn_code
10424 != CODE_FOR_nothing)))
10425 ;
10426 else
10427 return 0;
10430 preexpand_calls (exp);
10431 if (! get_subtarget (target)
10432 || GET_MODE (subtarget) != operand_mode
10433 || ! safe_from_p (subtarget, arg1, 1))
10434 subtarget = 0;
10436 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10437 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10439 if (target == 0)
10440 target = gen_reg_rtx (mode);
10442 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10443 because, if emit_store_flag does anything at all, it will succeed and
10444 OP0 and OP1 will not be used subsequently. */
10446 result = emit_store_flag (target, code,
10447 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10448 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10449 operand_mode, unsignedp, 1);
10451 if (result)
10453 if (invert)
10454 result = expand_binop (mode, xor_optab, result, const1_rtx,
10455 result, 0, OPTAB_LIB_WIDEN);
10456 return result;
10459 /* If this failed, we have to do this with set/compare/jump/set code. */
10460 if (GET_CODE (target) != REG
10461 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10462 target = gen_reg_rtx (GET_MODE (target));
10464 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10465 result = compare_from_rtx (op0, op1, code, unsignedp,
10466 operand_mode, NULL_RTX, 0);
10467 if (GET_CODE (result) == CONST_INT)
10468 return (((result == const0_rtx && ! invert)
10469 || (result != const0_rtx && invert))
10470 ? const0_rtx : const1_rtx);
10472 label = gen_label_rtx ();
10473 if (bcc_gen_fctn[(int) code] == 0)
10474 abort ();
10476 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10477 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10478 emit_label (label);
10480 return target;
10483 /* Generate a tablejump instruction (used for switch statements). */
10485 #ifdef HAVE_tablejump
10487 /* INDEX is the value being switched on, with the lowest value
10488 in the table already subtracted.
10489 MODE is its expected mode (needed if INDEX is constant).
10490 RANGE is the length of the jump table.
10491 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10493 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10494 index value is out of range. */
10496 void
10497 do_tablejump (index, mode, range, table_label, default_label)
10498 rtx index, range, table_label, default_label;
10499 enum machine_mode mode;
10501 register rtx temp, vector;
10503 /* Do an unsigned comparison (in the proper mode) between the index
10504 expression and the value which represents the length of the range.
10505 Since we just finished subtracting the lower bound of the range
10506 from the index expression, this comparison allows us to simultaneously
10507 check that the original index expression value is both greater than
10508 or equal to the minimum value of the range and less than or equal to
10509 the maximum value of the range. */
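/* Concretely (illustrative): for `switch (x)' with smallest case value 5,
   INDEX is `x - 5'; if `x' was below 5 the unsigned subtraction wrapped
   around to a very large value, so the single GTU comparison against RANGE
   also sends those values to DEFAULT_LABEL.  */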
10511 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10512 0, default_label);
10514 /* If index is in range, it must fit in Pmode.
10515 Convert to Pmode so we can index with it. */
10516 if (mode != Pmode)
10517 index = convert_to_mode (Pmode, index, 1);
10519 /* Don't let a MEM slip through, because then INDEX that comes
10520 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10521 and break_out_memory_refs will go to work on it and mess it up. */
10522 #ifdef PIC_CASE_VECTOR_ADDRESS
10523 if (flag_pic && GET_CODE (index) != REG)
10524 index = copy_to_mode_reg (Pmode, index);
10525 #endif
10527 /* If flag_force_addr were to affect this address
10528 it could interfere with the tricky assumptions made
10529 about addresses that contain label-refs,
10530 which may be valid only very near the tablejump itself. */
10531 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10532 GET_MODE_SIZE, because this indicates how large insns are. The other
10533 uses should all be Pmode, because they are addresses. This code
10534 could fail if addresses and insns are not the same size. */
10535 index = gen_rtx_PLUS (Pmode,
10536 gen_rtx_MULT (Pmode, index,
10537 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10538 gen_rtx_LABEL_REF (Pmode, table_label));
10539 #ifdef PIC_CASE_VECTOR_ADDRESS
10540 if (flag_pic)
10541 index = PIC_CASE_VECTOR_ADDRESS (index);
10542 else
10543 #endif
10544 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10545 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10546 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10547 RTX_UNCHANGING_P (vector) = 1;
10548 convert_move (temp, vector, 0);
10550 emit_jump_insn (gen_tablejump (temp, table_label));
10552 /* If we are generating PIC code or if the table is PC-relative, the
10553 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10554 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10555 emit_barrier ();
10558 #endif /* HAVE_tablejump */