1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "intl.h"
45 #include "tm_p.h"
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
53 #ifdef PUSH_ROUNDING
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first. */
57 #endif
59 #endif
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
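/* Editorial note, not part of the original source: with the defaults above,
   a push of X in mode M on a downward-growing stack comes out roughly as

       (set (mem:M (pre_dec (reg sp))) X)

   so STACK_PUSH_CODE simply selects which auto-modification of the stack
   pointer the push insns use.  */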
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
74 /* Hook called by safe_from_p for language-specific tree codes. It is
75 up to the language front-end to install a hook if it has any such
76 codes that safe_from_p needs to know about. Since safe_from_p will
77 recursively explore the TREE_OPERANDs of an expression, this hook
78 should not reexamine those pieces. This routine may recursively
79 call safe_from_p; it should always pass `0' as the TOP_P
80 parameter. */
81 int (*lang_safe_from_p) PARAMS ((rtx, tree));
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
91 /* Don't check memory usage, since code is being emitted to check memory
92 usage. Used when current_function_check_memory_usage is true, to avoid
93 infinite recursion. */
94 static int in_check_memory_usage;
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
101 struct move_by_pieces
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
113 int reverse;
116 /* This structure is used by store_by_pieces to describe the clear to
117 be performed. */
119 struct store_by_pieces
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
128 PTR constfundata;
129 int reverse;
132 extern struct obstack permanent_obstack;
134 static rtx get_push_address PARAMS ((int));
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
137 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
138 PARAMS ((unsigned HOST_WIDE_INT,
139 unsigned int));
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
142 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
145 unsigned int));
146 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
147 unsigned int));
148 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
149 enum machine_mode,
150 struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
154 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, enum machine_mode,
156 tree, tree, unsigned int, int,
157 int));
158 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
159 HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static rtx var_rtx PARAMS ((tree));
167 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 #ifdef PUSH_ROUNDING
176 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 #endif
178 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
180 /* Record for each mode whether we can move a register directly to or
181 from an object of that mode in memory. If we can't, we won't try
182 to use that mode directly when accessing a field of that mode. */
184 static char direct_load[NUM_MACHINE_MODES];
185 static char direct_store[NUM_MACHINE_MODES];
187 /* If a memory-to-memory move would take MOVE_RATIO or more simple
188 move-instruction sequences, we will do a movstr or libcall instead. */
190 #ifndef MOVE_RATIO
191 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 #define MOVE_RATIO 2
193 #else
194 /* If we are optimizing for space (-Os), cut down the default move ratio. */
195 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 #endif
197 #endif
199 /* This macro is used to determine whether move_by_pieces should be called
200 to perform a structure copy. */
201 #ifndef MOVE_BY_PIECES_P
202 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 #endif
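/* Editorial example, illustrative only: on a hypothetical 32-bit target
   where MOVE_MAX is 4, a word-aligned 10-byte copy takes two SImode moves
   plus one HImode move, so

       move_by_pieces_ninsns (10, 32) == 3

   and MOVE_BY_PIECES_P (10, 32) holds with the default MOVE_RATIO of 15,
   but not when optimizing for size, where MOVE_RATIO is 3.  */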
206 /* This array records the insn_code of insns to perform block moves. */
207 enum insn_code movstr_optab[NUM_MACHINE_MODES];
209 /* This array records the insn_code of insns to perform block clears. */
210 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
212 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
214 #ifndef SLOW_UNALIGNED_ACCESS
215 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
216 #endif
218 /* This is run once per compilation to set up which modes can be used
219 directly in memory and to initialize the block move optab. */
221 void
222 init_expr_once ()
224 rtx insn, pat;
225 enum machine_mode mode;
226 int num_clobbers;
227 rtx mem, mem1;
229 start_sequence ();
231 /* Try indexing by frame ptr and try by stack ptr.
232 It is known that on the Convex the stack ptr isn't a valid index.
233 With luck, one or the other is valid on any machine. */
234 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
235 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
237 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
238 pat = PATTERN (insn);
240 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
241 mode = (enum machine_mode) ((int) mode + 1))
243 int regno;
244 rtx reg;
246 direct_load[(int) mode] = direct_store[(int) mode] = 0;
247 PUT_MODE (mem, mode);
248 PUT_MODE (mem1, mode);
250 /* See if there is some register that can be used in this mode and
251 directly loaded or stored from memory. */
253 if (mode != VOIDmode && mode != BLKmode)
254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
255 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
256 regno++)
258 if (! HARD_REGNO_MODE_OK (regno, mode))
259 continue;
261 reg = gen_rtx_REG (mode, regno);
263 SET_SRC (pat) = mem;
264 SET_DEST (pat) = reg;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_load[(int) mode] = 1;
268 SET_SRC (pat) = mem1;
269 SET_DEST (pat) = reg;
270 if (recog (pat, insn, &num_clobbers) >= 0)
271 direct_load[(int) mode] = 1;
273 SET_SRC (pat) = reg;
274 SET_DEST (pat) = mem;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_store[(int) mode] = 1;
278 SET_SRC (pat) = reg;
279 SET_DEST (pat) = mem1;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_store[(int) mode] = 1;
285 end_sequence ();
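/* Editorial note: the loop above effectively asks recog whether insns of
   the shapes

       (set (reg:M k) (mem:M (reg sp)))
       (set (mem:M (reg sp)) (reg:M k))

   are recognizable for some hard register K, and records the answers in
   direct_load[M] and direct_store[M]; convert_move and convert_modes
   consult these arrays before referring to a MEM in a narrower mode.  */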
288 /* This is run at the start of compiling a function. */
290 void
291 init_expr ()
293 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
295 pending_chain = 0;
296 pending_stack_adjust = 0;
297 stack_pointer_delta = 0;
298 inhibit_defer_pop = 0;
299 saveregs_value = 0;
300 apply_args_value = 0;
301 forced_labels = 0;
304 void
305 mark_expr_status (p)
306 struct expr_status *p;
308 if (p == NULL)
309 return;
311 ggc_mark_rtx (p->x_saveregs_value);
312 ggc_mark_rtx (p->x_apply_args_value);
313 ggc_mark_rtx (p->x_forced_labels);
316 void
317 free_expr_status (f)
318 struct function *f;
320 free (f->expr);
321 f->expr = NULL;
324 /* Small sanity check that the queue is empty at the end of a function. */
326 void
327 finish_expr_for_function ()
329 if (pending_chain)
330 abort ();
333 /* Manage the queue of increment instructions to be output
334 for POSTINCREMENT_EXPR expressions, etc. */
336 /* Queue up to increment (or change) VAR later. BODY says how:
337 BODY should be the same thing you would pass to emit_insn
338 to increment right away. It will go to emit_insn later on.
340 The value is a QUEUED expression to be used in place of VAR
341 where you want to guarantee the pre-incrementation value of VAR. */
343 static rtx
344 enqueue_insn (var, body)
345 rtx var, body;
347 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
348 body, pending_chain);
349 return pending_chain;
352 /* Use protect_from_queue to convert a QUEUED expression
353 into something that you can put immediately into an instruction.
354 If the queued incrementation has not happened yet,
355 protect_from_queue returns the variable itself.
356 If the incrementation has happened, protect_from_queue returns a temp
357 that contains a copy of the old value of the variable.
359 Any time an rtx which might possibly be a QUEUED is to be put
360 into an instruction, it must be passed through protect_from_queue first.
361 QUEUED expressions are not meaningful in instructions.
363 Do not pass a value through protect_from_queue and then hold
364 on to it for a while before putting it in an instruction!
365 If the queue is flushed in between, incorrect code will result. */
368 protect_from_queue (x, modify)
369 rtx x;
370 int modify;
372 RTX_CODE code = GET_CODE (x);
374 #if 0 /* A QUEUED can hang around after the queue is forced out. */
375 /* Shortcut for most common case. */
376 if (pending_chain == 0)
377 return x;
378 #endif
380 if (code != QUEUED)
382 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
383 use of autoincrement. Make a copy of the contents of the memory
384 location rather than a copy of the address, but not if the value is
385 of mode BLKmode. Don't modify X in place since it might be
386 shared. */
387 if (code == MEM && GET_MODE (x) != BLKmode
388 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
390 rtx y = XEXP (x, 0);
391 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
393 if (QUEUED_INSN (y))
395 rtx temp = gen_reg_rtx (GET_MODE (x));
397 emit_insn_before (gen_move_insn (temp, new),
398 QUEUED_INSN (y));
399 return temp;
402 /* Copy the address into a pseudo, so that the returned value
403 remains correct across calls to emit_queue. */
404 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
407 /* Otherwise, recursively protect the subexpressions of all
408 the kinds of rtx's that can contain a QUEUED. */
409 if (code == MEM)
411 rtx tem = protect_from_queue (XEXP (x, 0), 0);
412 if (tem != XEXP (x, 0))
414 x = copy_rtx (x);
415 XEXP (x, 0) = tem;
418 else if (code == PLUS || code == MULT)
420 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
421 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
422 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
424 x = copy_rtx (x);
425 XEXP (x, 0) = new0;
426 XEXP (x, 1) = new1;
429 return x;
431 /* If the increment has not happened, use the variable itself. Copy it
432 into a new pseudo so that the value remains correct across calls to
433 emit_queue. */
434 if (QUEUED_INSN (x) == 0)
435 return copy_to_reg (QUEUED_VAR (x));
436 /* If the increment has happened and a pre-increment copy exists,
437 use that copy. */
438 if (QUEUED_COPY (x) != 0)
439 return QUEUED_COPY (x);
440 /* The increment has happened but we haven't set up a pre-increment copy.
441 Set one up now, and use it. */
442 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
443 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
444 QUEUED_INSN (x));
445 return QUEUED_COPY (x);
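/* Editorial sketch of the intended queue discipline, not from the original
   source: operands that might be QUEUED are filtered immediately before
   being placed in an insn, and the queue is flushed at statement
   boundaries, roughly

       op0 = protect_from_queue (op0, 0);    (operand is only read)
       op1 = protect_from_queue (op1, 1);    (operand is written)
       emit_insn (gen_move_insn (op1, op0));
       emit_queue ();

   Holding a value returned by protect_from_queue across a later call to
   emit_queue gives incorrect code, as the comment above warns.  */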
448 /* Return nonzero if X contains a QUEUED expression:
449 if it contains anything that will be altered by a queued increment.
450 We handle only combinations of MEM, PLUS, MINUS and MULT operators
451 since memory addresses generally contain only those. */
454 queued_subexp_p (x)
455 rtx x;
457 enum rtx_code code = GET_CODE (x);
458 switch (code)
460 case QUEUED:
461 return 1;
462 case MEM:
463 return queued_subexp_p (XEXP (x, 0));
464 case MULT:
465 case PLUS:
466 case MINUS:
467 return (queued_subexp_p (XEXP (x, 0))
468 || queued_subexp_p (XEXP (x, 1)));
469 default:
470 return 0;
474 /* Perform all the pending incrementations. */
476 void
477 emit_queue ()
479 rtx p;
480 while ((p = pending_chain))
482 rtx body = QUEUED_BODY (p);
484 if (GET_CODE (body) == SEQUENCE)
486 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
487 emit_insn (QUEUED_BODY (p));
489 else
490 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
491 pending_chain = QUEUED_NEXT (p);
495 /* Copy data from FROM to TO, where the machine modes are not the same.
496 Both modes may be integer, or both may be floating.
497 UNSIGNEDP should be nonzero if FROM is an unsigned type.
498 This causes zero-extension instead of sign-extension. */
500 void
501 convert_move (to, from, unsignedp)
502 rtx to, from;
503 int unsignedp;
505 enum machine_mode to_mode = GET_MODE (to);
506 enum machine_mode from_mode = GET_MODE (from);
507 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
508 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
509 enum insn_code code;
510 rtx libcall;
512 /* rtx code for making an equivalent value. */
513 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
515 to = protect_from_queue (to, 1);
516 from = protect_from_queue (from, 0);
518 if (to_real != from_real)
519 abort ();
521 /* If FROM is a SUBREG that indicates that we have already done at least
522 the required extension, strip it. We don't handle such SUBREGs as
523 TO here. */
525 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
526 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
527 >= GET_MODE_SIZE (to_mode))
528 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
529 from = gen_lowpart (to_mode, from), from_mode = to_mode;
531 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
532 abort ();
534 if (to_mode == from_mode
535 || (from_mode == VOIDmode && CONSTANT_P (from)))
537 emit_move_insn (to, from);
538 return;
541 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
543 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
544 abort ();
546 if (VECTOR_MODE_P (to_mode))
547 from = gen_rtx_SUBREG (to_mode, from, 0);
548 else
549 to = gen_rtx_SUBREG (from_mode, to, 0);
551 emit_move_insn (to, from);
552 return;
555 if (to_real != from_real)
556 abort ();
558 if (to_real)
560 rtx value, insns;
562 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
564 /* Try converting directly if the insn is supported. */
565 if ((code = can_extend_p (to_mode, from_mode, 0))
566 != CODE_FOR_nothing)
568 emit_unop_insn (code, to, from, UNKNOWN);
569 return;
573 #ifdef HAVE_trunchfqf2
574 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
576 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
577 return;
579 #endif
580 #ifdef HAVE_trunctqfqf2
581 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
583 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
584 return;
586 #endif
587 #ifdef HAVE_truncsfqf2
588 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
590 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
591 return;
593 #endif
594 #ifdef HAVE_truncdfqf2
595 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
597 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
598 return;
600 #endif
601 #ifdef HAVE_truncxfqf2
602 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
604 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
605 return;
607 #endif
608 #ifdef HAVE_trunctfqf2
609 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
611 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
612 return;
614 #endif
616 #ifdef HAVE_trunctqfhf2
617 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
619 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
620 return;
622 #endif
623 #ifdef HAVE_truncsfhf2
624 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
626 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
627 return;
629 #endif
630 #ifdef HAVE_truncdfhf2
631 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
633 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
634 return;
636 #endif
637 #ifdef HAVE_truncxfhf2
638 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
640 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
641 return;
643 #endif
644 #ifdef HAVE_trunctfhf2
645 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
647 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
648 return;
650 #endif
652 #ifdef HAVE_truncsftqf2
653 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
655 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
656 return;
658 #endif
659 #ifdef HAVE_truncdftqf2
660 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
662 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
663 return;
665 #endif
666 #ifdef HAVE_truncxftqf2
667 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
669 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
670 return;
672 #endif
673 #ifdef HAVE_trunctftqf2
674 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
676 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
677 return;
679 #endif
681 #ifdef HAVE_truncdfsf2
682 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
684 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
685 return;
687 #endif
688 #ifdef HAVE_truncxfsf2
689 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
691 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
692 return;
694 #endif
695 #ifdef HAVE_trunctfsf2
696 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
698 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
699 return;
701 #endif
702 #ifdef HAVE_truncxfdf2
703 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
705 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
706 return;
708 #endif
709 #ifdef HAVE_trunctfdf2
710 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
712 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
713 return;
715 #endif
717 libcall = (rtx) 0;
718 switch (from_mode)
720 case SFmode:
721 switch (to_mode)
723 case DFmode:
724 libcall = extendsfdf2_libfunc;
725 break;
727 case XFmode:
728 libcall = extendsfxf2_libfunc;
729 break;
731 case TFmode:
732 libcall = extendsftf2_libfunc;
733 break;
735 default:
736 break;
738 break;
740 case DFmode:
741 switch (to_mode)
743 case SFmode:
744 libcall = truncdfsf2_libfunc;
745 break;
747 case XFmode:
748 libcall = extenddfxf2_libfunc;
749 break;
751 case TFmode:
752 libcall = extenddftf2_libfunc;
753 break;
755 default:
756 break;
758 break;
760 case XFmode:
761 switch (to_mode)
763 case SFmode:
764 libcall = truncxfsf2_libfunc;
765 break;
767 case DFmode:
768 libcall = truncxfdf2_libfunc;
769 break;
771 default:
772 break;
774 break;
776 case TFmode:
777 switch (to_mode)
779 case SFmode:
780 libcall = trunctfsf2_libfunc;
781 break;
783 case DFmode:
784 libcall = trunctfdf2_libfunc;
785 break;
787 default:
788 break;
790 break;
792 default:
793 break;
796 if (libcall == (rtx) 0)
797 /* This conversion is not implemented yet. */
798 abort ();
800 start_sequence ();
801 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
802 1, from, from_mode);
803 insns = get_insns ();
804 end_sequence ();
805 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
806 from));
807 return;
810 /* Now both modes are integers. */
812 /* Handle expanding beyond a word. */
813 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
814 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
816 rtx insns;
817 rtx lowpart;
818 rtx fill_value;
819 rtx lowfrom;
820 int i;
821 enum machine_mode lowpart_mode;
822 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
824 /* Try converting directly if the insn is supported. */
825 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
826 != CODE_FOR_nothing)
828 /* If FROM is a SUBREG, put it into a register. Do this
829 so that we always generate the same set of insns for
830 better cse'ing; if an intermediate assignment occurred,
831 we won't be doing the operation directly on the SUBREG. */
832 if (optimize > 0 && GET_CODE (from) == SUBREG)
833 from = force_reg (from_mode, from);
834 emit_unop_insn (code, to, from, equiv_code);
835 return;
837 /* Next, try converting via full word. */
838 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
839 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
840 != CODE_FOR_nothing))
842 if (GET_CODE (to) == REG)
843 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
844 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
845 emit_unop_insn (code, to,
846 gen_lowpart (word_mode, to), equiv_code);
847 return;
850 /* No special multiword conversion insn; do it by hand. */
851 start_sequence ();
853 /* Since we will turn this into a no conflict block, we must ensure
854 that the source does not overlap the target. */
856 if (reg_overlap_mentioned_p (to, from))
857 from = force_reg (from_mode, from);
859 /* Get a copy of FROM widened to a word, if necessary. */
860 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
861 lowpart_mode = word_mode;
862 else
863 lowpart_mode = from_mode;
865 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
867 lowpart = gen_lowpart (lowpart_mode, to);
868 emit_move_insn (lowpart, lowfrom);
870 /* Compute the value to put in each remaining word. */
871 if (unsignedp)
872 fill_value = const0_rtx;
873 else
875 #ifdef HAVE_slt
876 if (HAVE_slt
877 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
878 && STORE_FLAG_VALUE == -1)
880 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
881 lowpart_mode, 0, 0);
882 fill_value = gen_reg_rtx (word_mode);
883 emit_insn (gen_slt (fill_value));
885 else
886 #endif
888 fill_value
889 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
890 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
891 NULL_RTX, 0);
892 fill_value = convert_to_mode (word_mode, fill_value, 1);
896 /* Fill the remaining words. */
897 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
899 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
900 rtx subword = operand_subword (to, index, 1, to_mode);
902 if (subword == 0)
903 abort ();
905 if (fill_value != subword)
906 emit_move_insn (subword, fill_value);
909 insns = get_insns ();
910 end_sequence ();
912 emit_no_conflict_block (insns, to, from, NULL_RTX,
913 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
914 return;
917 /* Truncating multi-word to a word or less. */
918 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
919 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
921 if (!((GET_CODE (from) == MEM
922 && ! MEM_VOLATILE_P (from)
923 && direct_load[(int) to_mode]
924 && ! mode_dependent_address_p (XEXP (from, 0)))
925 || GET_CODE (from) == REG
926 || GET_CODE (from) == SUBREG))
927 from = force_reg (from_mode, from);
928 convert_move (to, gen_lowpart (word_mode, from), 0);
929 return;
932 /* Handle pointer conversion. */ /* SPEE 900220. */
933 if (to_mode == PQImode)
935 if (from_mode != QImode)
936 from = convert_to_mode (QImode, from, unsignedp);
938 #ifdef HAVE_truncqipqi2
939 if (HAVE_truncqipqi2)
941 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
942 return;
944 #endif /* HAVE_truncqipqi2 */
945 abort ();
948 if (from_mode == PQImode)
950 if (to_mode != QImode)
952 from = convert_to_mode (QImode, from, unsignedp);
953 from_mode = QImode;
955 else
957 #ifdef HAVE_extendpqiqi2
958 if (HAVE_extendpqiqi2)
960 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
961 return;
963 #endif /* HAVE_extendpqiqi2 */
964 abort ();
968 if (to_mode == PSImode)
970 if (from_mode != SImode)
971 from = convert_to_mode (SImode, from, unsignedp);
973 #ifdef HAVE_truncsipsi2
974 if (HAVE_truncsipsi2)
976 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
977 return;
979 #endif /* HAVE_truncsipsi2 */
980 abort ();
983 if (from_mode == PSImode)
985 if (to_mode != SImode)
987 from = convert_to_mode (SImode, from, unsignedp);
988 from_mode = SImode;
990 else
992 #ifdef HAVE_extendpsisi2
993 if (! unsignedp && HAVE_extendpsisi2)
995 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_extendpsisi2 */
999 #ifdef HAVE_zero_extendpsisi2
1000 if (unsignedp && HAVE_zero_extendpsisi2)
1002 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1003 return;
1005 #endif /* HAVE_zero_extendpsisi2 */
1006 abort ();
1010 if (to_mode == PDImode)
1012 if (from_mode != DImode)
1013 from = convert_to_mode (DImode, from, unsignedp);
1015 #ifdef HAVE_truncdipdi2
1016 if (HAVE_truncdipdi2)
1018 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1019 return;
1021 #endif /* HAVE_truncdipdi2 */
1022 abort ();
1025 if (from_mode == PDImode)
1027 if (to_mode != DImode)
1029 from = convert_to_mode (DImode, from, unsignedp);
1030 from_mode = DImode;
1032 else
1034 #ifdef HAVE_extendpdidi2
1035 if (HAVE_extendpdidi2)
1037 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1038 return;
1040 #endif /* HAVE_extendpdidi2 */
1041 abort ();
1045 /* Now follow all the conversions between integers
1046 no more than a word long. */
1048 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1049 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1050 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1051 GET_MODE_BITSIZE (from_mode)))
1053 if (!((GET_CODE (from) == MEM
1054 && ! MEM_VOLATILE_P (from)
1055 && direct_load[(int) to_mode]
1056 && ! mode_dependent_address_p (XEXP (from, 0)))
1057 || GET_CODE (from) == REG
1058 || GET_CODE (from) == SUBREG))
1059 from = force_reg (from_mode, from);
1060 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1061 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1062 from = copy_to_reg (from);
1063 emit_move_insn (to, gen_lowpart (to_mode, from));
1064 return;
1067 /* Handle extension. */
1068 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1070 /* Convert directly if that works. */
1071 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1072 != CODE_FOR_nothing)
1074 emit_unop_insn (code, to, from, equiv_code);
1075 return;
1077 else
1079 enum machine_mode intermediate;
1080 rtx tmp;
1081 tree shift_amount;
1083 /* Search for a mode to convert via. */
1084 for (intermediate = from_mode; intermediate != VOIDmode;
1085 intermediate = GET_MODE_WIDER_MODE (intermediate))
1086 if (((can_extend_p (to_mode, intermediate, unsignedp)
1087 != CODE_FOR_nothing)
1088 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1089 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1090 GET_MODE_BITSIZE (intermediate))))
1091 && (can_extend_p (intermediate, from_mode, unsignedp)
1092 != CODE_FOR_nothing))
1094 convert_move (to, convert_to_mode (intermediate, from,
1095 unsignedp), unsignedp);
1096 return;
1099 /* No suitable intermediate mode.
1100 Generate what we need with shifts. */
1101 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1102 - GET_MODE_BITSIZE (from_mode), 0);
1103 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1104 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1105 to, unsignedp);
1106 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1107 to, unsignedp);
1108 if (tmp != to)
1109 emit_move_insn (to, tmp);
1110 return;
1114 /* Support special truncate insns for certain modes. */
1116 if (from_mode == DImode && to_mode == SImode)
1118 #ifdef HAVE_truncdisi2
1119 if (HAVE_truncdisi2)
1121 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1122 return;
1124 #endif
1125 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 return;
1129 if (from_mode == DImode && to_mode == HImode)
1131 #ifdef HAVE_truncdihi2
1132 if (HAVE_truncdihi2)
1134 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1135 return;
1137 #endif
1138 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 return;
1142 if (from_mode == DImode && to_mode == QImode)
1144 #ifdef HAVE_truncdiqi2
1145 if (HAVE_truncdiqi2)
1147 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1148 return;
1150 #endif
1151 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 return;
1155 if (from_mode == SImode && to_mode == HImode)
1157 #ifdef HAVE_truncsihi2
1158 if (HAVE_truncsihi2)
1160 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1161 return;
1163 #endif
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 return;
1168 if (from_mode == SImode && to_mode == QImode)
1170 #ifdef HAVE_truncsiqi2
1171 if (HAVE_truncsiqi2)
1173 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1174 return;
1176 #endif
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 return;
1181 if (from_mode == HImode && to_mode == QImode)
1183 #ifdef HAVE_trunchiqi2
1184 if (HAVE_trunchiqi2)
1186 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1187 return;
1189 #endif
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 return;
1194 if (from_mode == TImode && to_mode == DImode)
1196 #ifdef HAVE_trunctidi2
1197 if (HAVE_trunctidi2)
1199 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1200 return;
1202 #endif
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 return;
1207 if (from_mode == TImode && to_mode == SImode)
1209 #ifdef HAVE_trunctisi2
1210 if (HAVE_trunctisi2)
1212 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1213 return;
1215 #endif
1216 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 return;
1220 if (from_mode == TImode && to_mode == HImode)
1222 #ifdef HAVE_trunctihi2
1223 if (HAVE_trunctihi2)
1225 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1226 return;
1228 #endif
1229 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 return;
1233 if (from_mode == TImode && to_mode == QImode)
1235 #ifdef HAVE_trunctiqi2
1236 if (HAVE_trunctiqi2)
1238 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1239 return;
1241 #endif
1242 convert_move (to, force_reg (from_mode, from), unsignedp);
1243 return;
1246 /* Handle truncation of volatile memrefs, and so on;
1247 the things that couldn't be truncated directly,
1248 and for which there was no special instruction. */
1249 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1251 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1252 emit_move_insn (to, temp);
1253 return;
1256 /* Mode combination is not recognized. */
1257 abort ();
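/* Editorial sketch, assuming FROM already holds an SImode value: a caller
   widening it into a fresh DImode register would do roughly

       rtx to = gen_reg_rtx (DImode);
       convert_move (to, from, unsignedp);

   and the code above picks, in order of preference, a direct extend insn,
   a conversion through an intermediate mode, or a sequence of moves and
   shifts built by hand.  */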
1260 /* Return an rtx for a value that would result
1261 from converting X to mode MODE.
1262 Both X and MODE may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264 This can be done by referring to a part of X in place
1265 or by copying to a new temporary with conversion.
1267 This function *must not* call protect_from_queue
1268 except when putting X into an insn (in which case convert_move does it). */
1271 convert_to_mode (mode, x, unsignedp)
1272 enum machine_mode mode;
1273 rtx x;
1274 int unsignedp;
1276 return convert_modes (mode, VOIDmode, x, unsignedp);
1279 /* Return an rtx for a value that would result
1280 from converting X from mode OLDMODE to mode MODE.
1281 Both modes may be floating, or both integer.
1282 UNSIGNEDP is nonzero if X is an unsigned value.
1284 This can be done by referring to a part of X in place
1285 or by copying to a new temporary with conversion.
1287 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1289 This function *must not* call protect_from_queue
1290 except when putting X into an insn (in which case convert_move does it). */
1293 convert_modes (mode, oldmode, x, unsignedp)
1294 enum machine_mode mode, oldmode;
1295 rtx x;
1296 int unsignedp;
1298 rtx temp;
1300 /* If FROM is a SUBREG that indicates that we have already done at least
1301 the required extension, strip it. */
1303 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1304 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1305 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1306 x = gen_lowpart (mode, x);
1308 if (GET_MODE (x) != VOIDmode)
1309 oldmode = GET_MODE (x);
1311 if (mode == oldmode)
1312 return x;
1314 /* There is one case that we must handle specially: If we are converting
1315 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1316 we are to interpret the constant as unsigned, gen_lowpart will do
1317 the wrong thing if the constant appears negative. What we want to do is
1318 make the high-order word of the constant zero, not all ones. */
1320 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1321 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1322 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1324 HOST_WIDE_INT val = INTVAL (x);
1326 if (oldmode != VOIDmode
1327 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1329 int width = GET_MODE_BITSIZE (oldmode);
1331 /* We need to zero extend VAL. */
1332 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1335 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1338 /* We can do this with a gen_lowpart if both desired and current modes
1339 are integer, and this is either a constant integer, a register, or a
1340 non-volatile MEM. Except for the constant case where MODE is no
1341 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1343 if ((GET_CODE (x) == CONST_INT
1344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1345 || (GET_MODE_CLASS (mode) == MODE_INT
1346 && GET_MODE_CLASS (oldmode) == MODE_INT
1347 && (GET_CODE (x) == CONST_DOUBLE
1348 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1349 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1350 && direct_load[(int) mode])
1351 || (GET_CODE (x) == REG
1352 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1353 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1355 /* ?? If we don't know OLDMODE, we have to assume here that
1356 X does not need sign- or zero-extension. This may not be
1357 the case, but it's the best we can do. */
1358 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1359 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1361 HOST_WIDE_INT val = INTVAL (x);
1362 int width = GET_MODE_BITSIZE (oldmode);
1364 /* We must sign or zero-extend in this case. Start by
1365 zero-extending, then sign extend if we need to. */
1366 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1367 if (! unsignedp
1368 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1369 val |= (HOST_WIDE_INT) (-1) << width;
1371 return GEN_INT (trunc_int_for_mode (val, mode));
1374 return gen_lowpart (mode, x);
1377 temp = gen_reg_rtx (mode);
1378 convert_move (temp, x, unsignedp);
1379 return temp;
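/* Editorial note: convert_to_mode (mode, x, unsignedp) is simply
   convert_modes (mode, VOIDmode, x, unsignedp).  Pass OLDMODE explicitly
   when X is a CONST_INT whose original width cannot be recovered from X
   itself; for example the hypothetical call

       op = convert_modes (SImode, HImode, GEN_INT (val), 1);

   zero-extends VAL from the width of HImode instead of using it
   unchanged.  */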
1382 /* This macro is used to determine the largest unit size that
1383 move_by_pieces can use. */
1385 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1386 move efficiently, as opposed to MOVE_MAX which is the maximum
1387 number of bytes we can move with a single instruction. */
1389 #ifndef MOVE_MAX_PIECES
1390 #define MOVE_MAX_PIECES MOVE_MAX
1391 #endif
1393 /* Generate several move instructions to copy LEN bytes from block FROM to
1394 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1395 and TO through protect_from_queue before calling.
1397 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1398 used to push FROM to the stack.
1400 ALIGN is maximum alignment we can assume. */
1402 void
1403 move_by_pieces (to, from, len, align)
1404 rtx to, from;
1405 unsigned HOST_WIDE_INT len;
1406 unsigned int align;
1408 struct move_by_pieces data;
1409 rtx to_addr, from_addr = XEXP (from, 0);
1410 unsigned int max_size = MOVE_MAX_PIECES + 1;
1411 enum machine_mode mode = VOIDmode, tmode;
1412 enum insn_code icode;
1414 data.offset = 0;
1415 data.from_addr = from_addr;
1416 if (to)
1418 to_addr = XEXP (to, 0);
1419 data.to = to;
1420 data.autinc_to
1421 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1422 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1423 data.reverse
1424 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1426 else
1428 to_addr = NULL_RTX;
1429 data.to = NULL_RTX;
1430 data.autinc_to = 1;
1431 #ifdef STACK_GROWS_DOWNWARD
1432 data.reverse = 1;
1433 #else
1434 data.reverse = 0;
1435 #endif
1437 data.to_addr = to_addr;
1438 data.from = from;
1439 data.autinc_from
1440 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1441 || GET_CODE (from_addr) == POST_INC
1442 || GET_CODE (from_addr) == POST_DEC);
1444 data.explicit_inc_from = 0;
1445 data.explicit_inc_to = 0;
1446 if (data.reverse) data.offset = len;
1447 data.len = len;
1449 /* If copying requires more than two move insns,
1450 copy addresses to registers (to make displacements shorter)
1451 and use post-increment if available. */
1452 if (!(data.autinc_from && data.autinc_to)
1453 && move_by_pieces_ninsns (len, align) > 2)
1455 /* Find the mode of the largest move... */
1456 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1457 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1458 if (GET_MODE_SIZE (tmode) < max_size)
1459 mode = tmode;
1461 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1463 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1464 data.autinc_from = 1;
1465 data.explicit_inc_from = -1;
1467 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1469 data.from_addr = copy_addr_to_reg (from_addr);
1470 data.autinc_from = 1;
1471 data.explicit_inc_from = 1;
1473 if (!data.autinc_from && CONSTANT_P (from_addr))
1474 data.from_addr = copy_addr_to_reg (from_addr);
1475 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1477 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1478 data.autinc_to = 1;
1479 data.explicit_inc_to = -1;
1481 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1483 data.to_addr = copy_addr_to_reg (to_addr);
1484 data.autinc_to = 1;
1485 data.explicit_inc_to = 1;
1487 if (!data.autinc_to && CONSTANT_P (to_addr))
1488 data.to_addr = copy_addr_to_reg (to_addr);
1491 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1492 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1493 align = MOVE_MAX * BITS_PER_UNIT;
1495 /* First move what we can in the largest integer mode, then go to
1496 successively smaller modes. */
1498 while (max_size > 1)
1500 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1501 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1502 if (GET_MODE_SIZE (tmode) < max_size)
1503 mode = tmode;
1505 if (mode == VOIDmode)
1506 break;
1508 icode = mov_optab->handlers[(int) mode].insn_code;
1509 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1510 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1512 max_size = GET_MODE_SIZE (mode);
1515 /* The code above should have handled everything. */
1516 if (data.len > 0)
1517 abort ();
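/* Editorial sketch: a direct call looks roughly like

       move_by_pieces (to, from, 16, 32);

   where TO and FROM are BLKmode MEMs already passed through
   protect_from_queue, 16 is the byte count and 32 the alignment in bits;
   emit_block_move below takes this path when the size is a suitably
   small constant.  */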
1520 /* Return number of insns required to move L bytes by pieces.
1521 ALIGN (in bits) is maximum alignment we can assume. */
1523 static unsigned HOST_WIDE_INT
1524 move_by_pieces_ninsns (l, align)
1525 unsigned HOST_WIDE_INT l;
1526 unsigned int align;
1528 unsigned HOST_WIDE_INT n_insns = 0;
1529 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1531 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1532 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1533 align = MOVE_MAX * BITS_PER_UNIT;
1535 while (max_size > 1)
1537 enum machine_mode mode = VOIDmode, tmode;
1538 enum insn_code icode;
1540 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1541 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1542 if (GET_MODE_SIZE (tmode) < max_size)
1543 mode = tmode;
1545 if (mode == VOIDmode)
1546 break;
1548 icode = mov_optab->handlers[(int) mode].insn_code;
1549 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1550 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1552 max_size = GET_MODE_SIZE (mode);
1555 if (l)
1556 abort ();
1557 return n_insns;
1560 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1561 with move instructions for mode MODE. GENFUN is the gen_... function
1562 to make a move insn for that mode. DATA has all the other info. */
1564 static void
1565 move_by_pieces_1 (genfun, mode, data)
1566 rtx (*genfun) PARAMS ((rtx, ...));
1567 enum machine_mode mode;
1568 struct move_by_pieces *data;
1570 unsigned int size = GET_MODE_SIZE (mode);
1571 rtx to1 = NULL_RTX, from1;
1573 while (data->len >= size)
1575 if (data->reverse)
1576 data->offset -= size;
1578 if (data->to)
1580 if (data->autinc_to)
1582 to1 = replace_equiv_address (data->to, data->to_addr);
1583 to1 = adjust_address (to1, mode, 0);
1585 else
1586 to1 = adjust_address (data->to, mode, data->offset);
1589 if (data->autinc_from)
1591 from1 = replace_equiv_address (data->from, data->from_addr);
1592 from1 = adjust_address (from1, mode, 0);
1594 else
1595 from1 = adjust_address (data->from, mode, data->offset);
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1598 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1600 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1602 if (data->to)
1603 emit_insn ((*genfun) (to1, from1));
1604 else
1606 #ifdef PUSH_ROUNDING
1607 emit_single_push_insn (mode, from1, NULL);
1608 #else
1609 abort ();
1610 #endif
1613 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1614 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1615 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1616 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1618 if (! data->reverse)
1619 data->offset += size;
1621 data->len -= size;
1625 /* Emit code to move a block Y to a block X.
1626 This may be done with string-move instructions,
1627 with multiple scalar move instructions, or with a library call.
1629 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1630 with mode BLKmode.
1631 SIZE is an rtx that says how long they are.
1632 ALIGN is the maximum alignment we can assume they have.
1634 Return the address of the new block, if memcpy is called and returns it,
1635 0 otherwise. */
1638 emit_block_move (x, y, size, align)
1639 rtx x, y;
1640 rtx size;
1641 unsigned int align;
1643 rtx retval = 0;
1644 #ifdef TARGET_MEM_FUNCTIONS
1645 static tree fn;
1646 tree call_expr, arg_list;
1647 #endif
1649 if (GET_MODE (x) != BLKmode)
1650 abort ();
1652 if (GET_MODE (y) != BLKmode)
1653 abort ();
1655 x = protect_from_queue (x, 1);
1656 y = protect_from_queue (y, 0);
1657 size = protect_from_queue (size, 0);
1659 if (GET_CODE (x) != MEM)
1660 abort ();
1661 if (GET_CODE (y) != MEM)
1662 abort ();
1663 if (size == 0)
1664 abort ();
1666 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1667 move_by_pieces (x, y, INTVAL (size), align);
1668 else
1670 /* Try the most limited insn first, because there's no point
1671 including more than one in the machine description unless
1672 the more limited one has some advantage. */
1674 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1675 enum machine_mode mode;
1677 /* Since this is a move insn, we don't care about volatility. */
1678 volatile_ok = 1;
1680 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1681 mode = GET_MODE_WIDER_MODE (mode))
1683 enum insn_code code = movstr_optab[(int) mode];
1684 insn_operand_predicate_fn pred;
1686 if (code != CODE_FOR_nothing
1687 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1688 here because if SIZE is less than the mode mask, as it is
1689 returned by the macro, it will definitely be less than the
1690 actual mode mask. */
1691 && ((GET_CODE (size) == CONST_INT
1692 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1693 <= (GET_MODE_MASK (mode) >> 1)))
1694 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1695 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1696 || (*pred) (x, BLKmode))
1697 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1698 || (*pred) (y, BLKmode))
1699 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1700 || (*pred) (opalign, VOIDmode)))
1702 rtx op2;
1703 rtx last = get_last_insn ();
1704 rtx pat;
1706 op2 = convert_to_mode (mode, size, 1);
1707 pred = insn_data[(int) code].operand[2].predicate;
1708 if (pred != 0 && ! (*pred) (op2, mode))
1709 op2 = copy_to_mode_reg (mode, op2);
1711 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1712 if (pat)
1714 emit_insn (pat);
1715 volatile_ok = 0;
1716 return 0;
1718 else
1719 delete_insns_since (last);
1723 volatile_ok = 0;
1725 /* X, Y, or SIZE may have been passed through protect_from_queue.
1727 It is unsafe to save the value generated by protect_from_queue
1728 and reuse it later. Consider what happens if emit_queue is
1729 called before the return value from protect_from_queue is used.
1731 Expansion of the CALL_EXPR below will call emit_queue before
1732 we are finished emitting RTL for argument setup. So if we are
1733 not careful we could get the wrong value for an argument.
1735 To avoid this problem we go ahead and emit code to copy X, Y &
1736 SIZE into new pseudos. We can then place those new pseudos
1737 into an RTL_EXPR and use them later, even after a call to
1738 emit_queue.
1740 Note this is not strictly needed for library calls since they
1741 do not call emit_queue before loading their arguments. However,
1742 we may need to have library calls call emit_queue in the future
1743 since failing to do so could cause problems for targets which
1744 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1745 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1746 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1748 #ifdef TARGET_MEM_FUNCTIONS
1749 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1750 #else
1751 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1752 TREE_UNSIGNED (integer_type_node));
1753 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1754 #endif
1756 #ifdef TARGET_MEM_FUNCTIONS
1757 /* It is incorrect to use the libcall calling conventions to call
1758 memcpy in this context.
1760 This could be a user call to memcpy and the user may wish to
1761 examine the return value from memcpy.
1763 For targets where libcalls and normal calls have different conventions
1764 for returning pointers, we could end up generating incorrect code.
1766 So instead of using a libcall sequence we build up a suitable
1767 CALL_EXPR and expand the call in the normal fashion. */
1768 if (fn == NULL_TREE)
1770 tree fntype;
1772 /* This was copied from except.c, I don't know if all this is
1773 necessary in this context or not. */
1774 fn = get_identifier ("memcpy");
1775 fntype = build_pointer_type (void_type_node);
1776 fntype = build_function_type (fntype, NULL_TREE);
1777 fn = build_decl (FUNCTION_DECL, fn, fntype);
1778 ggc_add_tree_root (&fn, 1);
1779 DECL_EXTERNAL (fn) = 1;
1780 TREE_PUBLIC (fn) = 1;
1781 DECL_ARTIFICIAL (fn) = 1;
1782 TREE_NOTHROW (fn) = 1;
1783 make_decl_rtl (fn, NULL);
1784 assemble_external (fn);
1787 /* We need to make an argument list for the function call.
1789 memcpy has three arguments, the first two are void * addresses and
1790 the last is a size_t byte count for the copy. */
1791 arg_list
1792 = build_tree_list (NULL_TREE,
1793 make_tree (build_pointer_type (void_type_node), x));
1794 TREE_CHAIN (arg_list)
1795 = build_tree_list (NULL_TREE,
1796 make_tree (build_pointer_type (void_type_node), y));
1797 TREE_CHAIN (TREE_CHAIN (arg_list))
1798 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1799 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1801 /* Now we have to build up the CALL_EXPR itself. */
1802 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1803 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1804 call_expr, arg_list, NULL_TREE);
1805 TREE_SIDE_EFFECTS (call_expr) = 1;
1807 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1808 #else
1809 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1810 VOIDmode, 3, y, Pmode, x, Pmode,
1811 convert_to_mode (TYPE_MODE (integer_type_node), size,
1812 TREE_UNSIGNED (integer_type_node)),
1813 TYPE_MODE (integer_type_node));
1814 #endif
1817 return retval;
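/* Editorial sketch, with placeholder variable names: a typical caller
   copies an aggregate with something like

       emit_block_move (to_rtx, from_rtx, expr_size (exp),
                        TYPE_ALIGN (TREE_TYPE (exp)));

   where both operands are BLKmode MEMs; small constant sizes are expanded
   inline by move_by_pieces, larger ones go through a movstrM pattern or
   the memcpy/bcopy call emitted above.  */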
1820 /* Copy all or part of a value X into registers starting at REGNO.
1821 The number of registers to be filled is NREGS. */
1823 void
1824 move_block_to_reg (regno, x, nregs, mode)
1825 int regno;
1826 rtx x;
1827 int nregs;
1828 enum machine_mode mode;
1830 int i;
1831 #ifdef HAVE_load_multiple
1832 rtx pat;
1833 rtx last;
1834 #endif
1836 if (nregs == 0)
1837 return;
1839 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1840 x = validize_mem (force_const_mem (mode, x));
1842 /* See if the machine can do this with a load multiple insn. */
1843 #ifdef HAVE_load_multiple
1844 if (HAVE_load_multiple)
1846 last = get_last_insn ();
1847 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1848 GEN_INT (nregs));
1849 if (pat)
1851 emit_insn (pat);
1852 return;
1854 else
1855 delete_insns_since (last);
1857 #endif
1859 for (i = 0; i < nregs; i++)
1860 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1861 operand_subword_force (x, i, mode));
1864 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1865 The number of registers to be filled is NREGS. SIZE indicates the number
1866 of bytes in the object X. */
1868 void
1869 move_block_from_reg (regno, x, nregs, size)
1870 int regno;
1871 rtx x;
1872 int nregs;
1873 int size;
1875 int i;
1876 #ifdef HAVE_store_multiple
1877 rtx pat;
1878 rtx last;
1879 #endif
1880 enum machine_mode mode;
1882 if (nregs == 0)
1883 return;
1885 /* If SIZE is that of a mode no bigger than a word, just use that
1886 mode's store operation. */
1887 if (size <= UNITS_PER_WORD
1888 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1890 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1891 return;
1894 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1895 to the left before storing to memory. Note that the previous test
1896 doesn't handle all cases (e.g. SIZE == 3). */
1897 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1899 rtx tem = operand_subword (x, 0, 1, BLKmode);
1900 rtx shift;
1902 if (tem == 0)
1903 abort ();
1905 shift = expand_shift (LSHIFT_EXPR, word_mode,
1906 gen_rtx_REG (word_mode, regno),
1907 build_int_2 ((UNITS_PER_WORD - size)
1908 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1909 emit_move_insn (tem, shift);
1910 return;
1913 /* See if the machine can do this with a store multiple insn. */
1914 #ifdef HAVE_store_multiple
1915 if (HAVE_store_multiple)
1917 last = get_last_insn ();
1918 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1919 GEN_INT (nregs));
1920 if (pat)
1922 emit_insn (pat);
1923 return;
1925 else
1926 delete_insns_since (last);
1928 #endif
1930 for (i = 0; i < nregs; i++)
1932 rtx tem = operand_subword (x, i, 1, BLKmode);
1934 if (tem == 0)
1935 abort ();
1937 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1941 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1942 registers represented by a PARALLEL. SSIZE represents the total size of
1943 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1944 SRC in bits. */
1945 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1946 the balance will be in what would be the low-order memory addresses, i.e.
1947 left justified for big endian, right justified for little endian. This
1948 happens to be true for the targets currently using this support. If this
1949 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1950 would be needed. */
1952 void
1953 emit_group_load (dst, orig_src, ssize, align)
1954 rtx dst, orig_src;
1955 unsigned int align;
1956 int ssize;
1958 rtx *tmps, src;
1959 int start, i;
1961 if (GET_CODE (dst) != PARALLEL)
1962 abort ();
1964 /* Check for a NULL entry, used to indicate that the parameter goes
1965 both on the stack and in registers. */
1966 if (XEXP (XVECEXP (dst, 0, 0), 0))
1967 start = 0;
1968 else
1969 start = 1;
1971 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1973 /* Process the pieces. */
1974 for (i = start; i < XVECLEN (dst, 0); i++)
1976 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1977 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1978 unsigned int bytelen = GET_MODE_SIZE (mode);
1979 int shift = 0;
1981 /* Handle trailing fragments that run over the size of the struct. */
1982 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1984 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1985 bytelen = ssize - bytepos;
1986 if (bytelen <= 0)
1987 abort ();
1990 /* If we won't be loading directly from memory, protect the real source
1991 from strange tricks we might play; but make sure that the source can
1992 be loaded directly into the destination. */
1993 src = orig_src;
1994 if (GET_CODE (orig_src) != MEM
1995 && (!CONSTANT_P (orig_src)
1996 || (GET_MODE (orig_src) != mode
1997 && GET_MODE (orig_src) != VOIDmode)))
1999 if (GET_MODE (orig_src) == VOIDmode)
2000 src = gen_reg_rtx (mode);
2001 else
2002 src = gen_reg_rtx (GET_MODE (orig_src));
2003 emit_move_insn (src, orig_src);
2006 /* Optimize the access just a bit. */
2007 if (GET_CODE (src) == MEM
2008 && align >= GET_MODE_ALIGNMENT (mode)
2009 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2010 && bytelen == GET_MODE_SIZE (mode))
2012 tmps[i] = gen_reg_rtx (mode);
2013 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2015 else if (GET_CODE (src) == CONCAT)
2017 if (bytepos == 0
2018 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2019 tmps[i] = XEXP (src, 0);
2020 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2021 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2022 tmps[i] = XEXP (src, 1);
2023 else
2024 abort ();
2026 else if (CONSTANT_P (src)
2027 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2028 tmps[i] = src;
2029 else
2030 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2031 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2032 mode, mode, align, ssize);
2034 if (BYTES_BIG_ENDIAN && shift)
2035 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2036 tmps[i], 0, OPTAB_WIDEN);
2039 emit_queue ();
2041 /* Copy the extracted pieces into the proper (probable) hard regs. */
2042 for (i = start; i < XVECLEN (dst, 0); i++)
2043 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2046 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2047 registers represented by a PARALLEL. SSIZE represents the total size of
2048 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
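/* For illustration only (hypothetical call, made-up names): a caller that
   has a value described by such a PARALLEL in SRC and needs it in a stack
   slot might do

     emit_group_store (stack_slot, result_parallel, struct_size, align);

   where STACK_SLOT is a BLKmode MEM.  The register pieces are first copied
   into pseudos and then stored at their recorded byte offsets.  */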
2050 void
2051 emit_group_store (orig_dst, src, ssize, align)
2052 rtx orig_dst, src;
2053 int ssize;
2054 unsigned int align;
2056 rtx *tmps, dst;
2057 int start, i;
2059 if (GET_CODE (src) != PARALLEL)
2060 abort ();
2062 /* Check for a NULL entry, used to indicate that the parameter goes
2063 both on the stack and in registers. */
2064 if (XEXP (XVECEXP (src, 0, 0), 0))
2065 start = 0;
2066 else
2067 start = 1;
2069 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2071 /* Copy the (probable) hard regs into pseudos. */
2072 for (i = start; i < XVECLEN (src, 0); i++)
2074 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2075 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2076 emit_move_insn (tmps[i], reg);
2078 emit_queue ();
2080 /* If we won't be storing directly into memory, protect the real destination
2081 from strange tricks we might play. */
2082 dst = orig_dst;
2083 if (GET_CODE (dst) == PARALLEL)
2085 rtx temp;
2087 /* We can get a PARALLEL dst if there is a conditional expression in
2088 a return statement. In that case, the dst and src are the same,
2089 so no action is necessary. */
2090 if (rtx_equal_p (dst, src))
2091 return;
2093 /* It is unclear if we can ever reach here, but we may as well handle
2094 it. Allocate a temporary, and split this into a store/load to/from
2095 the temporary. */
2097 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2098 emit_group_store (temp, src, ssize, align);
2099 emit_group_load (dst, temp, ssize, align);
2100 return;
2102 else if (GET_CODE (dst) != MEM)
2104 dst = gen_reg_rtx (GET_MODE (orig_dst));
2105 /* Make life a bit easier for combine. */
2106 emit_move_insn (dst, const0_rtx);
2109 /* Process the pieces. */
2110 for (i = start; i < XVECLEN (src, 0); i++)
2112 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2113 enum machine_mode mode = GET_MODE (tmps[i]);
2114 unsigned int bytelen = GET_MODE_SIZE (mode);
2116 /* Handle trailing fragments that run over the size of the struct. */
2117 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2119 if (BYTES_BIG_ENDIAN)
2121 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2122 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2123 tmps[i], 0, OPTAB_WIDEN);
2125 bytelen = ssize - bytepos;
2128 /* Optimize the access just a bit. */
2129 if (GET_CODE (dst) == MEM
2130 && align >= GET_MODE_ALIGNMENT (mode)
2131 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2132 && bytelen == GET_MODE_SIZE (mode))
2133 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2134 else
2135 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2136 mode, tmps[i], align, ssize);
2139 emit_queue ();
2141 /* Copy from the pseudo into the (probable) hard reg. */
2142 if (GET_CODE (dst) == REG)
2143 emit_move_insn (orig_dst, dst);
2146 /* Generate code to copy a BLKmode object of TYPE out of a
2147 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2148 is null, a stack temporary is created. TGTBLK is returned.
2150 The primary purpose of this routine is to handle functions
2151 that return BLKmode structures in registers. Some machines
2152 (the PA for example) want to return all small structures
2153 in registers regardless of the structure's alignment. */
2156 copy_blkmode_from_reg (tgtblk, srcreg, type)
2157 rtx tgtblk;
2158 rtx srcreg;
2159 tree type;
2161 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2162 rtx src = NULL, dst = NULL;
2163 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2164 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2166 if (tgtblk == 0)
2168 tgtblk = assign_temp (build_qualified_type (type,
2169 (TYPE_QUALS (type)
2170 | TYPE_QUAL_CONST)),
2171 0, 1, 1);
2172 preserve_temp_slots (tgtblk);
2175 /* This code assumes srcreg is at least a full word. If it isn't,
2176 copy it into a new pseudo which is a full word. */
2177 if (GET_MODE (srcreg) != BLKmode
2178 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2179 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2181 /* Structures whose size is not a multiple of a word are aligned
2182 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2183 machine, this means we must skip the empty high order bytes when
2184 calculating the bit offset. */
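/* For instance (hypothetical numbers): with 32-bit words and a 6-byte
   structure, bytes % UNITS_PER_WORD is 2, so the correction computed below
   is 32 - 2 * 8 = 16 bits, and extraction from the first source word
   starts 16 bits in.  */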
2185 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2186 big_endian_correction
2187 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2189 /* Copy the structure BITSIZE bits at a time.
2191 We could probably emit more efficient code for machines which do not use
2192 strict alignment, but it doesn't seem worth the effort at the current
2193 time. */
2194 for (bitpos = 0, xbitpos = big_endian_correction;
2195 bitpos < bytes * BITS_PER_UNIT;
2196 bitpos += bitsize, xbitpos += bitsize)
2198 /* We need a new source operand each time xbitpos is on a
2199 word boundary and when xbitpos == big_endian_correction
2200 (the first time through). */
2201 if (xbitpos % BITS_PER_WORD == 0
2202 || xbitpos == big_endian_correction)
2203 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2204 GET_MODE (srcreg));
2206 /* We need a new destination operand each time bitpos is on
2207 a word boundary. */
2208 if (bitpos % BITS_PER_WORD == 0)
2209 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2211 /* Use xbitpos for the source extraction (right justified) and
2212 bitpos for the destination store (left justified). */
2213 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2214 extract_bit_field (src, bitsize,
2215 xbitpos % BITS_PER_WORD, 1,
2216 NULL_RTX, word_mode, word_mode,
2217 bitsize, BITS_PER_WORD),
2218 bitsize, BITS_PER_WORD);
2221 return tgtblk;
2224 /* Add a USE expression for REG to the (possibly empty) list pointed
2225 to by CALL_FUSAGE. REG must denote a hard register. */
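/* For illustration only (hypothetical use): a caller building up the usage
   list for a call might write

     rtx call_fusage = 0;
     use_reg (&call_fusage, gen_rtx_REG (Pmode, 7));

   to record that hard register 7 is used by the call; the resulting
   EXPR_LIST chain is typically attached to the CALL_INSN afterwards.  */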
2227 void
2228 use_reg (call_fusage, reg)
2229 rtx *call_fusage, reg;
2231 if (GET_CODE (reg) != REG
2232 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2233 abort ();
2235 *call_fusage
2236 = gen_rtx_EXPR_LIST (VOIDmode,
2237 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2240 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2241 starting at REGNO. All of these registers must be hard registers. */
2243 void
2244 use_regs (call_fusage, regno, nregs)
2245 rtx *call_fusage;
2246 int regno;
2247 int nregs;
2249 int i;
2251 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2252 abort ();
2254 for (i = 0; i < nregs; i++)
2255 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2258 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2259 PARALLEL REGS. This is for calls that pass values in multiple
2260 non-contiguous locations. The Irix 6 ABI has examples of this. */
2262 void
2263 use_group_regs (call_fusage, regs)
2264 rtx *call_fusage;
2265 rtx regs;
2267 int i;
2269 for (i = 0; i < XVECLEN (regs, 0); i++)
2271 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2273 /* A NULL entry means the parameter goes both on the stack and in
2274 registers. This can also be a MEM for targets that pass values
2275 partially on the stack and partially in registers. */
2276 if (reg != 0 && GET_CODE (reg) == REG)
2277 use_reg (call_fusage, reg);
2283 can_store_by_pieces (len, constfun, constfundata, align)
2284 unsigned HOST_WIDE_INT len;
2285 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2286 PTR constfundata;
2287 unsigned int align;
2289 unsigned HOST_WIDE_INT max_size, l;
2290 HOST_WIDE_INT offset = 0;
2291 enum machine_mode mode, tmode;
2292 enum insn_code icode;
2293 int reverse;
2294 rtx cst;
2296 if (! MOVE_BY_PIECES_P (len, align))
2297 return 0;
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
2303 /* We would first store what we can in the largest integer mode, then go to
2304 successively smaller modes. */
2306 for (reverse = 0;
2307 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2308 reverse++)
2310 l = len;
2311 mode = VOIDmode;
2312 max_size = MOVE_MAX_PIECES + 1;
2313 while (max_size > 1)
2315 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2317 if (GET_MODE_SIZE (tmode) < max_size)
2318 mode = tmode;
2320 if (mode == VOIDmode)
2321 break;
2323 icode = mov_optab->handlers[(int) mode].insn_code;
2324 if (icode != CODE_FOR_nothing
2325 && align >= GET_MODE_ALIGNMENT (mode))
2327 unsigned int size = GET_MODE_SIZE (mode);
2329 while (l >= size)
2331 if (reverse)
2332 offset -= size;
2334 cst = (*constfun) (constfundata, offset, mode);
2335 if (!LEGITIMATE_CONSTANT_P (cst))
2336 return 0;
2338 if (!reverse)
2339 offset += size;
2341 l -= size;
2345 max_size = GET_MODE_SIZE (mode);
2348 /* The code above should have handled everything. */
2349 if (l != 0)
2350 abort ();
2353 return 1;
2356 /* Generate several move instructions to store LEN bytes generated by
2357 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2358 pointer which will be passed as argument in every CONSTFUN call.
2359 ALIGN is maximum alignment we can assume. */
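/* For illustration only (hypothetical callback, not part of the original
   sources): a CONSTFUN that produces zero for every piece could be
   written as

     static rtx
     zero_constfun (data, offset, mode)
          PTR data ATTRIBUTE_UNUSED;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode ATTRIBUTE_UNUSED;
     {
       return const0_rtx;
     }

   clear_by_pieces_1 below is the real callback of this form used for
   clearing memory; other callers may supply functions that derive each
   piece's value from CONSTFUNDATA.  */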
2361 void
2362 store_by_pieces (to, len, constfun, constfundata, align)
2363 rtx to;
2364 unsigned HOST_WIDE_INT len;
2365 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2366 PTR constfundata;
2367 unsigned int align;
2369 struct store_by_pieces data;
2371 if (! MOVE_BY_PIECES_P (len, align))
2372 abort ();
2373 to = protect_from_queue (to, 1);
2374 data.constfun = constfun;
2375 data.constfundata = constfundata;
2376 data.len = len;
2377 data.to = to;
2378 store_by_pieces_1 (&data, align);
2381 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2382 rtx with BLKmode). The caller must pass TO through protect_from_queue
2383 before calling. ALIGN is maximum alignment we can assume. */
2385 static void
2386 clear_by_pieces (to, len, align)
2387 rtx to;
2388 unsigned HOST_WIDE_INT len;
2389 unsigned int align;
2391 struct store_by_pieces data;
2393 data.constfun = clear_by_pieces_1;
2394 data.constfundata = NULL;
2395 data.len = len;
2396 data.to = to;
2397 store_by_pieces_1 (&data, align);
2400 /* Callback routine for clear_by_pieces.
2401 Return const0_rtx unconditionally. */
2403 static rtx
2404 clear_by_pieces_1 (data, offset, mode)
2405 PTR data ATTRIBUTE_UNUSED;
2406 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2407 enum machine_mode mode ATTRIBUTE_UNUSED;
2409 return const0_rtx;
2412 /* Subroutine of clear_by_pieces and store_by_pieces.
2413 Generate several move instructions to store LEN bytes of block TO. (A MEM
2414 rtx with BLKmode). The caller must pass TO through protect_from_queue
2415 before calling. ALIGN is maximum alignment we can assume. */
2417 static void
2418 store_by_pieces_1 (data, align)
2419 struct store_by_pieces *data;
2420 unsigned int align;
2422 rtx to_addr = XEXP (data->to, 0);
2423 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2424 enum machine_mode mode = VOIDmode, tmode;
2425 enum insn_code icode;
2427 data->offset = 0;
2428 data->to_addr = to_addr;
2429 data->autinc_to
2430 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2431 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2433 data->explicit_inc_to = 0;
2434 data->reverse
2435 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2436 if (data->reverse)
2437 data->offset = data->len;
2439 /* If storing requires more than two move insns,
2440 copy addresses to registers (to make displacements shorter)
2441 and use post-increment if available. */
2442 if (!data->autinc_to
2443 && move_by_pieces_ninsns (data->len, align) > 2)
2445 /* Determine the main mode we'll be using. */
2446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2448 if (GET_MODE_SIZE (tmode) < max_size)
2449 mode = tmode;
2451 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = -1;
2458 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2459 && ! data->autinc_to)
2461 data->to_addr = copy_addr_to_reg (to_addr);
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = 1;
2466 if ( !data->autinc_to && CONSTANT_P (to_addr))
2467 data->to_addr = copy_addr_to_reg (to_addr);
2470 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2471 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2472 align = MOVE_MAX * BITS_PER_UNIT;
2474 /* First store what we can in the largest integer mode, then go to
2475 successively smaller modes. */
2477 while (max_size > 1)
2479 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2480 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2481 if (GET_MODE_SIZE (tmode) < max_size)
2482 mode = tmode;
2484 if (mode == VOIDmode)
2485 break;
2487 icode = mov_optab->handlers[(int) mode].insn_code;
2488 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2489 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2491 max_size = GET_MODE_SIZE (mode);
2494 /* The code above should have handled everything. */
2495 if (data->len != 0)
2496 abort ();
2499 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2500 with move instructions for mode MODE. GENFUN is the gen_... function
2501 to make a move insn for that mode. DATA has all the other info. */
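/* For illustration: on a typical port, for SImode the GENFUN passed here is
   the generator obtained from

     GEN_FCN (mov_optab->handlers[(int) SImode].insn_code)

   i.e. roughly the gen_movsi expander, and each iteration of the loop below
   emits one move of GET_MODE_SIZE (mode) bytes.  */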
2503 static void
2504 store_by_pieces_2 (genfun, mode, data)
2505 rtx (*genfun) PARAMS ((rtx, ...));
2506 enum machine_mode mode;
2507 struct store_by_pieces *data;
2509 unsigned int size = GET_MODE_SIZE (mode);
2510 rtx to1, cst;
2512 while (data->len >= size)
2514 if (data->reverse)
2515 data->offset -= size;
2517 if (data->autinc_to)
2519 to1 = replace_equiv_address (data->to, data->to_addr);
2520 to1 = adjust_address (to1, mode, 0);
2522 else
2523 to1 = adjust_address (data->to, mode, data->offset);
2525 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2526 emit_insn (gen_add2_insn (data->to_addr,
2527 GEN_INT (-(HOST_WIDE_INT) size)));
2529 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2530 emit_insn ((*genfun) (to1, cst));
2532 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2533 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2535 if (! data->reverse)
2536 data->offset += size;
2538 data->len -= size;
2542 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2543 its length in bytes and ALIGN is the maximum alignment we can assume.
2545 If we call a function that returns the length of the block, return it. */
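/* For illustration only (hypothetical call, made-up names): clearing an
   N-byte BLKmode MEM with a known alignment might look like

     clear_storage (blk_mem, GEN_INT (n_bytes), align_in_bits);

   which tries clear_by_pieces, then a clrstr pattern, and finally falls
   back to a call to memset (or bzero).  */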
2548 clear_storage (object, size, align)
2549 rtx object;
2550 rtx size;
2551 unsigned int align;
2553 #ifdef TARGET_MEM_FUNCTIONS
2554 static tree fn;
2555 tree call_expr, arg_list;
2556 #endif
2557 rtx retval = 0;
2559 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2560 just move a zero. Otherwise, do this a piece at a time. */
2561 if (GET_MODE (object) != BLKmode
2562 && GET_CODE (size) == CONST_INT
2563 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2564 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2565 else
2567 object = protect_from_queue (object, 1);
2568 size = protect_from_queue (size, 0);
2570 if (GET_CODE (size) == CONST_INT
2571 && MOVE_BY_PIECES_P (INTVAL (size), align))
2572 clear_by_pieces (object, INTVAL (size), align);
2573 else
2575 /* Try the most limited insn first, because there's no point
2576 including more than one in the machine description unless
2577 the more limited one has some advantage. */
2579 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2580 enum machine_mode mode;
2582 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2583 mode = GET_MODE_WIDER_MODE (mode))
2585 enum insn_code code = clrstr_optab[(int) mode];
2586 insn_operand_predicate_fn pred;
2588 if (code != CODE_FOR_nothing
2589 /* We don't need MODE to be narrower than
2590 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2591 the mode mask, as it is returned by the macro, it will
2592 definitely be less than the actual mode mask. */
2593 && ((GET_CODE (size) == CONST_INT
2594 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2595 <= (GET_MODE_MASK (mode) >> 1)))
2596 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2597 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2598 || (*pred) (object, BLKmode))
2599 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2600 || (*pred) (opalign, VOIDmode)))
2602 rtx op1;
2603 rtx last = get_last_insn ();
2604 rtx pat;
2606 op1 = convert_to_mode (mode, size, 1);
2607 pred = insn_data[(int) code].operand[1].predicate;
2608 if (pred != 0 && ! (*pred) (op1, mode))
2609 op1 = copy_to_mode_reg (mode, op1);
2611 pat = GEN_FCN ((int) code) (object, op1, opalign);
2612 if (pat)
2614 emit_insn (pat);
2615 return 0;
2617 else
2618 delete_insns_since (last);
2622 /* OBJECT or SIZE may have been passed through protect_from_queue.
2624 It is unsafe to save the value generated by protect_from_queue
2625 and reuse it later. Consider what happens if emit_queue is
2626 called before the return value from protect_from_queue is used.
2628 Expansion of the CALL_EXPR below will call emit_queue before
2629 we are finished emitting RTL for argument setup. So if we are
2630 not careful we could get the wrong value for an argument.
2632 To avoid this problem we go ahead and emit code to copy OBJECT
2633 and SIZE into new pseudos. We can then place those new pseudos
2634 into an RTL_EXPR and use them later, even after a call to
2635 emit_queue.
2637 Note this is not strictly needed for library calls since they
2638 do not call emit_queue before loading their arguments. However,
2639 we may need to have library calls call emit_queue in the future
2640 since failing to do so could cause problems for targets which
2641 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2642 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2644 #ifdef TARGET_MEM_FUNCTIONS
2645 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2646 #else
2647 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2648 TREE_UNSIGNED (integer_type_node));
2649 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2650 #endif
2652 #ifdef TARGET_MEM_FUNCTIONS
2653 /* It is incorrect to use the libcall calling conventions to call
2654 memset in this context.
2656 This could be a user call to memset and the user may wish to
2657 examine the return value from memset.
2659 For targets where libcalls and normal calls have different
2660 conventions for returning pointers, we could end up generating
2661 incorrect code.
2663 So instead of using a libcall sequence we build up a suitable
2664 CALL_EXPR and expand the call in the normal fashion. */
2665 if (fn == NULL_TREE)
2667 tree fntype;
2669 /* This was copied from except.c; I don't know if all this is
2670 necessary in this context or not. */
2671 fn = get_identifier ("memset");
2672 fntype = build_pointer_type (void_type_node);
2673 fntype = build_function_type (fntype, NULL_TREE);
2674 fn = build_decl (FUNCTION_DECL, fn, fntype);
2675 ggc_add_tree_root (&fn, 1);
2676 DECL_EXTERNAL (fn) = 1;
2677 TREE_PUBLIC (fn) = 1;
2678 DECL_ARTIFICIAL (fn) = 1;
2679 TREE_NOTHROW (fn) = 1;
2680 make_decl_rtl (fn, NULL);
2681 assemble_external (fn);
2684 /* We need to make an argument list for the function call.
2686 memset has three arguments, the first is a void * address, the
2687 second an integer with the initialization value, the last is a
2688 size_t byte count for the copy. */
2689 arg_list
2690 = build_tree_list (NULL_TREE,
2691 make_tree (build_pointer_type (void_type_node),
2692 object));
2693 TREE_CHAIN (arg_list)
2694 = build_tree_list (NULL_TREE,
2695 make_tree (integer_type_node, const0_rtx));
2696 TREE_CHAIN (TREE_CHAIN (arg_list))
2697 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2698 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2700 /* Now we have to build up the CALL_EXPR itself. */
2701 call_expr = build1 (ADDR_EXPR,
2702 build_pointer_type (TREE_TYPE (fn)), fn);
2703 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2704 call_expr, arg_list, NULL_TREE);
2705 TREE_SIDE_EFFECTS (call_expr) = 1;
2707 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2708 #else
2709 emit_library_call (bzero_libfunc, LCT_NORMAL,
2710 VOIDmode, 2, object, Pmode, size,
2711 TYPE_MODE (integer_type_node));
2712 #endif
2716 return retval;
2719 /* Generate code to copy Y into X.
2720 Both Y and X must have the same mode, except that
2721 Y can be a constant with VOIDmode.
2722 This mode cannot be BLKmode; use emit_block_move for that.
2724 Return the last instruction emitted. */
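/* For illustration only (hypothetical call):

     last = emit_move_insn (gen_reg_rtx (SImode), GEN_INT (42));

   emits a single SImode move and returns the insn.  If the constant were
   not LEGITIMATE_CONSTANT_P, it would first be forced into the constant
   pool and, for a REG destination, a REG_EQUAL note recording the original
   constant would be added to the move.  */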
2727 emit_move_insn (x, y)
2728 rtx x, y;
2730 enum machine_mode mode = GET_MODE (x);
2731 rtx y_cst = NULL_RTX;
2732 rtx last_insn;
2734 x = protect_from_queue (x, 1);
2735 y = protect_from_queue (y, 0);
2737 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2738 abort ();
2740 /* Never force constant_p_rtx to memory. */
2741 if (GET_CODE (y) == CONSTANT_P_RTX)
2743 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2745 y_cst = y;
2746 y = force_const_mem (mode, y);
2749 /* If X or Y are memory references, verify that their addresses are valid
2750 for the machine. */
2751 if (GET_CODE (x) == MEM
2752 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2753 && ! push_operand (x, GET_MODE (x)))
2754 || (flag_force_addr
2755 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2756 x = validize_mem (x);
2758 if (GET_CODE (y) == MEM
2759 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 || (flag_force_addr
2761 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2762 y = validize_mem (y);
2764 if (mode == BLKmode)
2765 abort ();
2767 last_insn = emit_move_insn_1 (x, y);
2769 if (y_cst && GET_CODE (x) == REG)
2770 REG_NOTES (last_insn)
2771 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2773 return last_insn;
2776 /* Low level part of emit_move_insn.
2777 Called just like emit_move_insn, but assumes X and Y
2778 are basically valid. */
2781 emit_move_insn_1 (x, y)
2782 rtx x, y;
2784 enum machine_mode mode = GET_MODE (x);
2785 enum machine_mode submode;
2786 enum mode_class class = GET_MODE_CLASS (mode);
2787 unsigned int i;
2789 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2790 abort ();
2792 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2793 return
2794 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2796 /* Expand complex moves by moving real part and imag part, if possible. */
2797 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2798 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2799 * BITS_PER_UNIT),
2800 (class == MODE_COMPLEX_INT
2801 ? MODE_INT : MODE_FLOAT),
2803 && (mov_optab->handlers[(int) submode].insn_code
2804 != CODE_FOR_nothing))
2806 /* Don't split destination if it is a stack push. */
2807 int stack = push_operand (x, GET_MODE (x));
2809 #ifdef PUSH_ROUNDING
2810 /* In case we output to the stack, but the size is smaller than the machine can
2811 push exactly, we need to use move instructions. */
2812 if (stack
2813 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2815 rtx temp;
2816 int offset1, offset2;
2818 /* Do not use anti_adjust_stack, since we don't want to update
2819 stack_pointer_delta. */
2820 temp = expand_binop (Pmode,
2821 #ifdef STACK_GROWS_DOWNWARD
2822 sub_optab,
2823 #else
2824 add_optab,
2825 #endif
2826 stack_pointer_rtx,
2827 GEN_INT
2828 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2829 stack_pointer_rtx,
2831 OPTAB_LIB_WIDEN);
2832 if (temp != stack_pointer_rtx)
2833 emit_move_insn (stack_pointer_rtx, temp);
2834 #ifdef STACK_GROWS_DOWNWARD
2835 offset1 = 0;
2836 offset2 = GET_MODE_SIZE (submode);
2837 #else
2838 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2839 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2840 + GET_MODE_SIZE (submode));
2841 #endif
2842 emit_move_insn (change_address (x, submode,
2843 gen_rtx_PLUS (Pmode,
2844 stack_pointer_rtx,
2845 GEN_INT (offset1))),
2846 gen_realpart (submode, y));
2847 emit_move_insn (change_address (x, submode,
2848 gen_rtx_PLUS (Pmode,
2849 stack_pointer_rtx,
2850 GEN_INT (offset2))),
2851 gen_imagpart (submode, y));
2853 else
2854 #endif
2855 /* If this is a stack push, push the highpart first, so it
2856 will be in the argument order.
2858 In that case, change_address is used only to convert
2859 the mode, not to change the address. */
2860 if (stack)
2862 /* Note that the real part always precedes the imag part in memory
2863 regardless of machine's endianness. */
2864 #ifdef STACK_GROWS_DOWNWARD
2865 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2866 (gen_rtx_MEM (submode, XEXP (x, 0)),
2867 gen_imagpart (submode, y)));
2868 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2869 (gen_rtx_MEM (submode, XEXP (x, 0)),
2870 gen_realpart (submode, y)));
2871 #else
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2873 (gen_rtx_MEM (submode, XEXP (x, 0)),
2874 gen_realpart (submode, y)));
2875 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2876 (gen_rtx_MEM (submode, XEXP (x, 0)),
2877 gen_imagpart (submode, y)));
2878 #endif
2880 else
2882 rtx realpart_x, realpart_y;
2883 rtx imagpart_x, imagpart_y;
2885 /* If this is a complex value with each part being smaller than a
2886 word, the usual calling sequence will likely pack the pieces into
2887 a single register. Unfortunately, SUBREG of hard registers only
2888 deals in terms of words, so we have a problem converting input
2889 arguments to the CONCAT of two registers that is used elsewhere
2890 for complex values. If this is before reload, we can copy it into
2891 memory and reload. FIXME, we should see about using extract and
2892 insert on integer registers, but complex short and complex char
2893 variables should be rarely used. */
2894 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2895 && (reload_in_progress | reload_completed) == 0)
2897 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2898 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2900 if (packed_dest_p || packed_src_p)
2902 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2903 ? MODE_FLOAT : MODE_INT);
2905 enum machine_mode reg_mode
2906 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2908 if (reg_mode != BLKmode)
2910 rtx mem = assign_stack_temp (reg_mode,
2911 GET_MODE_SIZE (mode), 0);
2912 rtx cmem = adjust_address (mem, mode, 0);
2914 cfun->cannot_inline
2915 = N_("function using short complex types cannot be inline");
2917 if (packed_dest_p)
2919 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2920 emit_move_insn_1 (cmem, y);
2921 return emit_move_insn_1 (sreg, mem);
2923 else
2925 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2926 emit_move_insn_1 (mem, sreg);
2927 return emit_move_insn_1 (x, cmem);
2933 realpart_x = gen_realpart (submode, x);
2934 realpart_y = gen_realpart (submode, y);
2935 imagpart_x = gen_imagpart (submode, x);
2936 imagpart_y = gen_imagpart (submode, y);
2938 /* Show the output dies here. This is necessary for SUBREGs
2939 of pseudos since we cannot track their lifetimes correctly;
2940 hard regs shouldn't appear here except as return values.
2941 We never want to emit such a clobber after reload. */
2942 if (x != y
2943 && ! (reload_in_progress || reload_completed)
2944 && (GET_CODE (realpart_x) == SUBREG
2945 || GET_CODE (imagpart_x) == SUBREG))
2947 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2950 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2951 (realpart_x, realpart_y));
2952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2953 (imagpart_x, imagpart_y));
2956 return get_last_insn ();
2959 /* This will handle any multi-word mode that lacks a move_insn pattern.
2960 However, you will get better code if you define such patterns,
2961 even if they must turn into multiple assembler instructions. */
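/* For illustration (hypothetical target): moving a DImode value on a
   32-bit port with no movdi pattern falls through to here and is emitted
   as two word_mode moves of the low and high words, preceded by a CLOBBER
   of the destination when SUBREGs of pseudos are involved.  */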
2962 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2964 rtx last_insn = 0;
2965 rtx seq, inner;
2966 int need_clobber;
2968 #ifdef PUSH_ROUNDING
2970 /* If X is a push on the stack, do the push now and replace
2971 X with a reference to the stack pointer. */
2972 if (push_operand (x, GET_MODE (x)))
2974 rtx temp;
2975 enum rtx_code code;
2977 /* Do not use anti_adjust_stack, since we don't want to update
2978 stack_pointer_delta. */
2979 temp = expand_binop (Pmode,
2980 #ifdef STACK_GROWS_DOWNWARD
2981 sub_optab,
2982 #else
2983 add_optab,
2984 #endif
2985 stack_pointer_rtx,
2986 GEN_INT
2987 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2988 stack_pointer_rtx,
2990 OPTAB_LIB_WIDEN);
2991 if (temp != stack_pointer_rtx)
2992 emit_move_insn (stack_pointer_rtx, temp);
2994 code = GET_CODE (XEXP (x, 0));
2995 /* Just hope that small offsets off SP are OK. */
2996 if (code == POST_INC)
2997 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2998 GEN_INT (-(HOST_WIDE_INT)
2999 GET_MODE_SIZE (GET_MODE (x))));
3000 else if (code == POST_DEC)
3001 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3002 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3003 else
3004 temp = stack_pointer_rtx;
3006 x = change_address (x, VOIDmode, temp);
3008 #endif
3010 /* If we are in reload, see if either operand is a MEM whose address
3011 is scheduled for replacement. */
3012 if (reload_in_progress && GET_CODE (x) == MEM
3013 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3014 x = replace_equiv_address_nv (x, inner);
3015 if (reload_in_progress && GET_CODE (y) == MEM
3016 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3017 y = replace_equiv_address_nv (y, inner);
3019 start_sequence ();
3021 need_clobber = 0;
3022 for (i = 0;
3023 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3024 i++)
3026 rtx xpart = operand_subword (x, i, 1, mode);
3027 rtx ypart = operand_subword (y, i, 1, mode);
3029 /* If we can't get a part of Y, put Y into memory if it is a
3030 constant. Otherwise, force it into a register. If we still
3031 can't get a part of Y, abort. */
3032 if (ypart == 0 && CONSTANT_P (y))
3034 y = force_const_mem (mode, y);
3035 ypart = operand_subword (y, i, 1, mode);
3037 else if (ypart == 0)
3038 ypart = operand_subword_force (y, i, mode);
3040 if (xpart == 0 || ypart == 0)
3041 abort ();
3043 need_clobber |= (GET_CODE (xpart) == SUBREG);
3045 last_insn = emit_move_insn (xpart, ypart);
3048 seq = gen_sequence ();
3049 end_sequence ();
3051 /* Show the output dies here. This is necessary for SUBREGs
3052 of pseudos since we cannot track their lifetimes correctly;
3053 hard regs shouldn't appear here except as return values.
3054 We never want to emit such a clobber after reload. */
3055 if (x != y
3056 && ! (reload_in_progress || reload_completed)
3057 && need_clobber != 0)
3059 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3062 emit_insn (seq);
3064 return last_insn;
3066 else
3067 abort ();
3070 /* Pushing data onto the stack. */
3072 /* Push a block of length SIZE (perhaps variable)
3073 and return an rtx to address the beginning of the block.
3074 Note that it is not possible for the value returned to be a QUEUED.
3075 The value may be virtual_outgoing_args_rtx.
3077 EXTRA is the number of bytes of padding to push in addition to SIZE.
3078 BELOW nonzero means this padding comes at low addresses;
3079 otherwise, the padding comes at high addresses. */
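/* For illustration (hypothetical numbers): on a machine whose stack grows
   downward, push_block (GEN_INT (32), 0, 0) emits a 32-byte stack
   adjustment and returns an address based on virtual_outgoing_args_rtx
   that addresses the newly allocated block.  */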
3082 push_block (size, extra, below)
3083 rtx size;
3084 int extra, below;
3086 rtx temp;
3088 size = convert_modes (Pmode, ptr_mode, size, 1);
3089 if (CONSTANT_P (size))
3090 anti_adjust_stack (plus_constant (size, extra));
3091 else if (GET_CODE (size) == REG && extra == 0)
3092 anti_adjust_stack (size);
3093 else
3095 temp = copy_to_mode_reg (Pmode, size);
3096 if (extra != 0)
3097 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3098 temp, 0, OPTAB_LIB_WIDEN);
3099 anti_adjust_stack (temp);
3102 #ifndef STACK_GROWS_DOWNWARD
3103 if (0)
3104 #else
3105 if (1)
3106 #endif
3108 temp = virtual_outgoing_args_rtx;
3109 if (extra != 0 && below)
3110 temp = plus_constant (temp, extra);
3112 else
3114 if (GET_CODE (size) == CONST_INT)
3115 temp = plus_constant (virtual_outgoing_args_rtx,
3116 -INTVAL (size) - (below ? 0 : extra));
3117 else if (extra != 0 && !below)
3118 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3119 negate_rtx (Pmode, plus_constant (size, extra)));
3120 else
3121 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3122 negate_rtx (Pmode, size));
3125 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3129 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3130 block of SIZE bytes. */
3132 static rtx
3133 get_push_address (size)
3134 int size;
3136 rtx temp;
3138 if (STACK_PUSH_CODE == POST_DEC)
3139 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3140 else if (STACK_PUSH_CODE == POST_INC)
3141 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 else
3143 temp = stack_pointer_rtx;
3145 return copy_to_reg (temp);
3148 #ifdef PUSH_ROUNDING
3150 /* Emit single push insn. */
3152 static void
3153 emit_single_push_insn (mode, x, type)
3154 rtx x;
3155 enum machine_mode mode;
3156 tree type;
3158 rtx dest_addr;
3159 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3160 rtx dest;
3161 enum insn_code icode;
3162 insn_operand_predicate_fn pred;
3164 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3165 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3166 a MEM representing the push operation to the move expander. */
3167 icode = push_optab->handlers[(int) mode].insn_code;
3168 if (icode != CODE_FOR_nothing)
3170 if (((pred = insn_data[(int) icode].operand[0].predicate)
3171 && !((*pred) (x, mode))))
3172 x = force_reg (mode, x);
3173 emit_insn (GEN_FCN (icode) (x));
3174 return;
3176 if (GET_MODE_SIZE (mode) == rounded_size)
3177 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3178 else
3180 #ifdef STACK_GROWS_DOWNWARD
3181 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3182 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3183 #else
3184 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3185 GEN_INT (rounded_size));
3186 #endif
3187 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3190 dest = gen_rtx_MEM (mode, dest_addr);
3192 if (type != 0)
3194 set_mem_attributes (dest, type, 1);
3195 /* Function incoming arguments may overlap with sibling call
3196 outgoing arguments and we cannot allow reordering of reads
3197 from function arguments with stores to outgoing arguments
3198 of sibling calls. */
3199 set_mem_alias_set (dest, 0);
3201 emit_move_insn (dest, x);
3203 #endif
3205 /* Generate code to push X onto the stack, assuming it has mode MODE and
3206 type TYPE.
3207 MODE is redundant except when X is a CONST_INT (since they don't
3208 carry mode info).
3209 SIZE is an rtx for the size of data to be copied (in bytes),
3210 needed only if X is BLKmode.
3212 ALIGN (in bits) is maximum alignment we can assume.
3214 If PARTIAL and REG are both nonzero, then copy that many of the first
3215 words of X into registers starting with REG, and push the rest of X.
3216 The amount of space pushed is decreased by PARTIAL words,
3217 rounded *down* to a multiple of PARM_BOUNDARY.
3218 REG must be a hard register in this case.
3219 If REG is zero but PARTIAL is not, take all other actions for an
3220 argument partially in registers, but do not actually load any
3221 registers.
3223 EXTRA is the amount in bytes of extra space to leave next to this arg.
3224 This is ignored if an argument block has already been allocated.
3226 On a machine that lacks real push insns, ARGS_ADDR is the address of
3227 the bottom of the argument block for this call. We use indexing off there
3228 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3229 argument block has not been preallocated.
3231 ARGS_SO_FAR is the size of args previously pushed for this call.
3233 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3234 for arguments passed in registers. If nonzero, it will be the number
3235 of bytes required. */
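/* For illustration (hypothetical case): if PARTIAL is 2 and REG is hard
   register 4, the first two words of X end up in registers 4 and 5 (via
   move_block_to_reg at the end of this function) and only the remainder
   of X is actually pushed; the stack space consumed is reduced
   accordingly.  */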
3237 void
3238 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3239 args_addr, args_so_far, reg_parm_stack_space,
3240 alignment_pad)
3241 rtx x;
3242 enum machine_mode mode;
3243 tree type;
3244 rtx size;
3245 unsigned int align;
3246 int partial;
3247 rtx reg;
3248 int extra;
3249 rtx args_addr;
3250 rtx args_so_far;
3251 int reg_parm_stack_space;
3252 rtx alignment_pad;
3254 rtx xinner;
3255 enum direction stack_direction
3256 #ifdef STACK_GROWS_DOWNWARD
3257 = downward;
3258 #else
3259 = upward;
3260 #endif
3262 /* Decide where to pad the argument: `downward' for below,
3263 `upward' for above, or `none' for don't pad it.
3264 Default is below for small data on big-endian machines; else above. */
3265 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3267 /* Invert direction if stack is post-decrement.
3268 FIXME: why? */
3269 if (STACK_PUSH_CODE == POST_DEC)
3270 if (where_pad != none)
3271 where_pad = (where_pad == downward ? upward : downward);
3273 xinner = x = protect_from_queue (x, 0);
3275 if (mode == BLKmode)
3277 /* Copy a block into the stack, entirely or partially. */
3279 rtx temp;
3280 int used = partial * UNITS_PER_WORD;
3281 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3282 int skip;
3284 if (size == 0)
3285 abort ();
3287 used -= offset;
3289 /* USED is now the # of bytes we need not copy to the stack
3290 because registers will take care of them. */
3292 if (partial != 0)
3293 xinner = adjust_address (xinner, BLKmode, used);
3295 /* If the partial register-part of the arg counts in its stack size,
3296 skip the part of stack space corresponding to the registers.
3297 Otherwise, start copying to the beginning of the stack space,
3298 by setting SKIP to 0. */
3299 skip = (reg_parm_stack_space == 0) ? 0 : used;
3301 #ifdef PUSH_ROUNDING
3302 /* Do it with several push insns if that doesn't take lots of insns
3303 and if there is no difficulty with push insns that skip bytes
3304 on the stack for alignment purposes. */
3305 if (args_addr == 0
3306 && PUSH_ARGS
3307 && GET_CODE (size) == CONST_INT
3308 && skip == 0
3309 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3310 /* Here we avoid the case of a structure whose weak alignment
3311 forces many pushes of a small amount of data,
3312 and such small pushes do rounding that causes trouble. */
3313 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3314 || align >= BIGGEST_ALIGNMENT
3315 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3316 == (align / BITS_PER_UNIT)))
3317 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3319 /* Push padding now if padding above and stack grows down,
3320 or if padding below and stack grows up.
3321 But if space already allocated, this has already been done. */
3322 if (extra && args_addr == 0
3323 && where_pad != none && where_pad != stack_direction)
3324 anti_adjust_stack (GEN_INT (extra));
3326 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3328 if (current_function_check_memory_usage && ! in_check_memory_usage)
3330 rtx temp;
3332 in_check_memory_usage = 1;
3333 temp = get_push_address (INTVAL (size) - used);
3334 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3335 emit_library_call (chkr_copy_bitmap_libfunc,
3336 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3337 Pmode, XEXP (xinner, 0), Pmode,
3338 GEN_INT (INTVAL (size) - used),
3339 TYPE_MODE (sizetype));
3340 else
3341 emit_library_call (chkr_set_right_libfunc,
3342 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3343 Pmode, GEN_INT (INTVAL (size) - used),
3344 TYPE_MODE (sizetype),
3345 GEN_INT (MEMORY_USE_RW),
3346 TYPE_MODE (integer_type_node));
3347 in_check_memory_usage = 0;
3350 else
3351 #endif /* PUSH_ROUNDING */
3353 rtx target;
3355 /* Otherwise make space on the stack and copy the data
3356 to the address of that space. */
3358 /* Deduct words put into registers from the size we must copy. */
3359 if (partial != 0)
3361 if (GET_CODE (size) == CONST_INT)
3362 size = GEN_INT (INTVAL (size) - used);
3363 else
3364 size = expand_binop (GET_MODE (size), sub_optab, size,
3365 GEN_INT (used), NULL_RTX, 0,
3366 OPTAB_LIB_WIDEN);
3369 /* Get the address of the stack space.
3370 In this case, we do not deal with EXTRA separately.
3371 A single stack adjust will do. */
3372 if (! args_addr)
3374 temp = push_block (size, extra, where_pad == downward);
3375 extra = 0;
3377 else if (GET_CODE (args_so_far) == CONST_INT)
3378 temp = memory_address (BLKmode,
3379 plus_constant (args_addr,
3380 skip + INTVAL (args_so_far)));
3381 else
3382 temp = memory_address (BLKmode,
3383 plus_constant (gen_rtx_PLUS (Pmode,
3384 args_addr,
3385 args_so_far),
3386 skip));
3387 if (current_function_check_memory_usage && ! in_check_memory_usage)
3389 in_check_memory_usage = 1;
3390 target = copy_to_reg (temp);
3391 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3392 emit_library_call (chkr_copy_bitmap_libfunc,
3393 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3394 target, Pmode,
3395 XEXP (xinner, 0), Pmode,
3396 size, TYPE_MODE (sizetype));
3397 else
3398 emit_library_call (chkr_set_right_libfunc,
3399 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3400 target, Pmode,
3401 size, TYPE_MODE (sizetype),
3402 GEN_INT (MEMORY_USE_RW),
3403 TYPE_MODE (integer_type_node));
3404 in_check_memory_usage = 0;
3407 target = gen_rtx_MEM (BLKmode, temp);
3409 if (type != 0)
3411 set_mem_attributes (target, type, 1);
3412 /* Function incoming arguments may overlap with sibling call
3413 outgoing arguments and we cannot allow reordering of reads
3414 from function arguments with stores to outgoing arguments
3415 of sibling calls. */
3416 set_mem_alias_set (target, 0);
3419 /* TEMP is the address of the block. Copy the data there. */
3420 if (GET_CODE (size) == CONST_INT
3421 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3423 move_by_pieces (target, xinner, INTVAL (size), align);
3424 goto ret;
3426 else
3428 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3429 enum machine_mode mode;
3431 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3432 mode != VOIDmode;
3433 mode = GET_MODE_WIDER_MODE (mode))
3435 enum insn_code code = movstr_optab[(int) mode];
3436 insn_operand_predicate_fn pred;
3438 if (code != CODE_FOR_nothing
3439 && ((GET_CODE (size) == CONST_INT
3440 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3441 <= (GET_MODE_MASK (mode) >> 1)))
3442 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3443 && (!(pred = insn_data[(int) code].operand[0].predicate)
3444 || ((*pred) (target, BLKmode)))
3445 && (!(pred = insn_data[(int) code].operand[1].predicate)
3446 || ((*pred) (xinner, BLKmode)))
3447 && (!(pred = insn_data[(int) code].operand[3].predicate)
3448 || ((*pred) (opalign, VOIDmode))))
3450 rtx op2 = convert_to_mode (mode, size, 1);
3451 rtx last = get_last_insn ();
3452 rtx pat;
3454 pred = insn_data[(int) code].operand[2].predicate;
3455 if (pred != 0 && ! (*pred) (op2, mode))
3456 op2 = copy_to_mode_reg (mode, op2);
3458 pat = GEN_FCN ((int) code) (target, xinner,
3459 op2, opalign);
3460 if (pat)
3462 emit_insn (pat);
3463 goto ret;
3465 else
3466 delete_insns_since (last);
3471 if (!ACCUMULATE_OUTGOING_ARGS)
3473 /* If the source is referenced relative to the stack pointer,
3474 copy it to another register to stabilize it. We do not need
3475 to do this if we know that we won't be changing sp. */
3477 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3478 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3479 temp = copy_to_reg (temp);
3482 /* Make inhibit_defer_pop nonzero around the library call
3483 to force it to pop the bcopy-arguments right away. */
3484 NO_DEFER_POP;
3485 #ifdef TARGET_MEM_FUNCTIONS
3486 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3487 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3488 convert_to_mode (TYPE_MODE (sizetype),
3489 size, TREE_UNSIGNED (sizetype)),
3490 TYPE_MODE (sizetype));
3491 #else
3492 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3493 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3494 convert_to_mode (TYPE_MODE (integer_type_node),
3495 size,
3496 TREE_UNSIGNED (integer_type_node)),
3497 TYPE_MODE (integer_type_node));
3498 #endif
3499 OK_DEFER_POP;
3502 else if (partial > 0)
3504 /* Scalar partly in registers. */
3506 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3507 int i;
3508 int not_stack;
3509 /* # words of start of argument
3510 that we must make space for but need not store. */
3511 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3512 int args_offset = INTVAL (args_so_far);
3513 int skip;
3515 /* Push padding now if padding above and stack grows down,
3516 or if padding below and stack grows up.
3517 But if space already allocated, this has already been done. */
3518 if (extra && args_addr == 0
3519 && where_pad != none && where_pad != stack_direction)
3520 anti_adjust_stack (GEN_INT (extra));
3522 /* If we make space by pushing it, we might as well push
3523 the real data. Otherwise, we can leave OFFSET nonzero
3524 and leave the space uninitialized. */
3525 if (args_addr == 0)
3526 offset = 0;
3528 /* Now NOT_STACK gets the number of words that we don't need to
3529 allocate on the stack. */
3530 not_stack = partial - offset;
3532 /* If the partial register-part of the arg counts in its stack size,
3533 skip the part of stack space corresponding to the registers.
3534 Otherwise, start copying to the beginning of the stack space,
3535 by setting SKIP to 0. */
3536 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3538 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3539 x = validize_mem (force_const_mem (mode, x));
3541 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3542 SUBREGs of such registers are not allowed. */
3543 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3544 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3545 x = copy_to_reg (x);
3547 /* Loop over all the words allocated on the stack for this arg. */
3548 /* We can do it by words, because any scalar bigger than a word
3549 has a size a multiple of a word. */
3550 #ifndef PUSH_ARGS_REVERSED
3551 for (i = not_stack; i < size; i++)
3552 #else
3553 for (i = size - 1; i >= not_stack; i--)
3554 #endif
3555 if (i >= not_stack + offset)
3556 emit_push_insn (operand_subword_force (x, i, mode),
3557 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3558 0, args_addr,
3559 GEN_INT (args_offset + ((i - not_stack + skip)
3560 * UNITS_PER_WORD)),
3561 reg_parm_stack_space, alignment_pad);
3563 else
3565 rtx addr;
3566 rtx target = NULL_RTX;
3567 rtx dest;
3569 /* Push padding now if padding above and stack grows down,
3570 or if padding below and stack grows up.
3571 But if space already allocated, this has already been done. */
3572 if (extra && args_addr == 0
3573 && where_pad != none && where_pad != stack_direction)
3574 anti_adjust_stack (GEN_INT (extra));
3576 #ifdef PUSH_ROUNDING
3577 if (args_addr == 0 && PUSH_ARGS)
3578 emit_single_push_insn (mode, x, type);
3579 else
3580 #endif
3582 if (GET_CODE (args_so_far) == CONST_INT)
3583 addr
3584 = memory_address (mode,
3585 plus_constant (args_addr,
3586 INTVAL (args_so_far)));
3587 else
3588 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3589 args_so_far));
3590 target = addr;
3591 dest = gen_rtx_MEM (mode, addr);
3592 if (type != 0)
3594 set_mem_attributes (dest, type, 1);
3595 /* Function incoming arguments may overlap with sibling call
3596 outgoing arguments and we cannot allow reordering of reads
3597 from function arguments with stores to outgoing arguments
3598 of sibling calls. */
3599 set_mem_alias_set (dest, 0);
3602 emit_move_insn (dest, x);
3606 if (current_function_check_memory_usage && ! in_check_memory_usage)
3608 in_check_memory_usage = 1;
3609 if (target == 0)
3610 target = get_push_address (GET_MODE_SIZE (mode));
3612 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3613 emit_library_call (chkr_copy_bitmap_libfunc,
3614 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3615 Pmode, XEXP (x, 0), Pmode,
3616 GEN_INT (GET_MODE_SIZE (mode)),
3617 TYPE_MODE (sizetype));
3618 else
3619 emit_library_call (chkr_set_right_libfunc,
3620 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3621 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3622 TYPE_MODE (sizetype),
3623 GEN_INT (MEMORY_USE_RW),
3624 TYPE_MODE (integer_type_node));
3625 in_check_memory_usage = 0;
3629 ret:
3630 /* If part should go in registers, copy that part
3631 into the appropriate registers. Do this now, at the end,
3632 since mem-to-mem copies above may do function calls. */
3633 if (partial > 0 && reg != 0)
3635 /* Handle calls that pass values in multiple non-contiguous locations.
3636 The Irix 6 ABI has examples of this. */
3637 if (GET_CODE (reg) == PARALLEL)
3638 emit_group_load (reg, x, -1, align); /* ??? size? */
3639 else
3640 move_block_to_reg (REGNO (reg), x, partial, mode);
3643 if (extra && args_addr == 0 && where_pad == stack_direction)
3644 anti_adjust_stack (GEN_INT (extra));
3646 if (alignment_pad && args_addr == 0)
3647 anti_adjust_stack (alignment_pad);
3650 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3651 operations. */
3653 static rtx
3654 get_subtarget (x)
3655 rtx x;
3657 return ((x == 0
3658 /* Only registers can be subtargets. */
3659 || GET_CODE (x) != REG
3660 /* If the register is readonly, it can't be set more than once. */
3661 || RTX_UNCHANGING_P (x)
3662 /* Don't use hard regs to avoid extending their life. */
3663 || REGNO (x) < FIRST_PSEUDO_REGISTER
3664 /* Avoid subtargets inside loops,
3665 since they hide some invariant expressions. */
3666 || preserve_subexpressions_p ())
3667 ? 0 : x);
3670 /* Expand an assignment that stores the value of FROM into TO.
3671 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3672 (This may contain a QUEUED rtx;
3673 if the value is constant, this rtx is a constant.)
3674 Otherwise, the returned value is NULL_RTX.
3676 SUGGEST_REG is no longer actually used.
3677 It used to mean, copy the value through a register
3678 and return that register, if that is possible.
3679 We now use WANT_VALUE to decide whether to do this. */
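/* For illustration (hypothetical source): for a C statement such as
   "s.f = x" the front end arrives here with TO being the COMPONENT_REF for
   s.f and FROM the expression for x; WANT_VALUE is nonzero when the
   assignment itself is used as a value, as in "y = (s.f = x)".  */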
3682 expand_assignment (to, from, want_value, suggest_reg)
3683 tree to, from;
3684 int want_value;
3685 int suggest_reg ATTRIBUTE_UNUSED;
3687 rtx to_rtx = 0;
3688 rtx result;
3690 /* Don't crash if the lhs of the assignment was erroneous. */
3692 if (TREE_CODE (to) == ERROR_MARK)
3694 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3695 return want_value ? result : NULL_RTX;
3698 /* Assignment of a structure component needs special treatment
3699 if the structure component's rtx is not simply a MEM.
3700 Assignment of an array element at a constant index, and assignment of
3701 an array element in an unaligned packed structure field, have the same
3702 problem. */
3704 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3705 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3707 enum machine_mode mode1;
3708 HOST_WIDE_INT bitsize, bitpos;
3709 tree offset;
3710 int unsignedp;
3711 int volatilep = 0;
3712 tree tem;
3713 unsigned int alignment;
3715 push_temp_slots ();
3716 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3717 &unsignedp, &volatilep, &alignment);
3719 /* If we are going to use store_bit_field and extract_bit_field,
3720 make sure to_rtx will be safe for multiple use. */
3722 if (mode1 == VOIDmode && want_value)
3723 tem = stabilize_reference (tem);
3725 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3726 if (offset != 0)
3728 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3730 if (GET_CODE (to_rtx) != MEM)
3731 abort ();
3733 if (GET_MODE (offset_rtx) != ptr_mode)
3735 #ifdef POINTERS_EXTEND_UNSIGNED
3736 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3737 #else
3738 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3739 #endif
3742 /* A constant address in TO_RTX can have VOIDmode, we must not try
3743 to call force_reg for that case. Avoid that case. */
3744 if (GET_CODE (to_rtx) == MEM
3745 && GET_MODE (to_rtx) == BLKmode
3746 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3747 && bitsize
3748 && (bitpos % bitsize) == 0
3749 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3750 && alignment == GET_MODE_ALIGNMENT (mode1))
3752 rtx temp
3753 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3755 if (GET_CODE (XEXP (temp, 0)) == REG)
3756 to_rtx = temp;
3757 else
3758 to_rtx = (replace_equiv_address
3759 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3760 XEXP (temp, 0))));
3761 bitpos = 0;
3764 to_rtx = offset_address (to_rtx, offset_rtx,
3765 highest_pow2_factor (offset));
3768 if (volatilep)
3770 if (GET_CODE (to_rtx) == MEM)
3772 /* When the offset is zero, to_rtx is the address of the
3773 structure we are storing into, and hence may be shared.
3774 We must make a new MEM before setting the volatile bit. */
3775 if (offset == 0)
3776 to_rtx = copy_rtx (to_rtx);
3778 MEM_VOLATILE_P (to_rtx) = 1;
3780 #if 0 /* This was turned off because, when a field is volatile
3781 in an object which is not volatile, the object may be in a register,
3782 and then we would abort over here. */
3783 else
3784 abort ();
3785 #endif
3788 if (TREE_CODE (to) == COMPONENT_REF
3789 && TREE_READONLY (TREE_OPERAND (to, 1)))
3791 if (offset == 0)
3792 to_rtx = copy_rtx (to_rtx);
3794 RTX_UNCHANGING_P (to_rtx) = 1;
3797 /* Check the access. */
3798 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3800 rtx to_addr;
3801 int size;
3802 int best_mode_size;
3803 enum machine_mode best_mode;
3805 best_mode = get_best_mode (bitsize, bitpos,
3806 TYPE_ALIGN (TREE_TYPE (tem)),
3807 mode1, volatilep);
3808 if (best_mode == VOIDmode)
3809 best_mode = QImode;
3811 best_mode_size = GET_MODE_BITSIZE (best_mode);
3812 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3813 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3814 size *= GET_MODE_SIZE (best_mode);
3816 /* Check the access right of the pointer. */
3817 in_check_memory_usage = 1;
3818 if (size)
3819 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3820 VOIDmode, 3, to_addr, Pmode,
3821 GEN_INT (size), TYPE_MODE (sizetype),
3822 GEN_INT (MEMORY_USE_WO),
3823 TYPE_MODE (integer_type_node));
3824 in_check_memory_usage = 0;
3827 /* If this is a varying-length object, we must get the address of
3828 the source and do an explicit block move. */
3829 if (bitsize < 0)
3831 unsigned int from_align;
3832 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3833 rtx inner_to_rtx
3834 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3836 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3837 MIN (alignment, from_align));
3838 free_temp_slots ();
3839 pop_temp_slots ();
3840 return to_rtx;
3842 else
3844 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3845 (want_value
3846 /* Spurious cast for HPUX compiler. */
3847 ? ((enum machine_mode)
3848 TYPE_MODE (TREE_TYPE (to)))
3849 : VOIDmode),
3850 unsignedp,
3851 alignment,
3852 int_size_in_bytes (TREE_TYPE (tem)),
3853 get_alias_set (to));
3855 preserve_temp_slots (result);
3856 free_temp_slots ();
3857 pop_temp_slots ();
3859 /* If the value is meaningful, convert RESULT to the proper mode.
3860 Otherwise, return nothing. */
3861 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3862 TYPE_MODE (TREE_TYPE (from)),
3863 result,
3864 TREE_UNSIGNED (TREE_TYPE (to)))
3865 : NULL_RTX);
3869 /* If the rhs is a function call and its value is not an aggregate,
3870 call the function before we start to compute the lhs.
3871 This is needed for correct code for cases such as
3872 val = setjmp (buf) on machines where reference to val
3873 requires loading up part of an address in a separate insn.
3875 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3876 since it might be a promoted variable where the zero- or sign- extension
3877 needs to be done. Handling this in the normal way is safe because no
3878 computation is done before the call. */
3879 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3880 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3881 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3882 && GET_CODE (DECL_RTL (to)) == REG))
3884 rtx value;
3886 push_temp_slots ();
3887 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3888 if (to_rtx == 0)
3889 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3891 /* Handle calls that return values in multiple non-contiguous locations.
3892 The Irix 6 ABI has examples of this. */
3893 if (GET_CODE (to_rtx) == PARALLEL)
3894 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3895 TYPE_ALIGN (TREE_TYPE (from)));
3896 else if (GET_MODE (to_rtx) == BLKmode)
3897 emit_block_move (to_rtx, value, expr_size (from),
3898 TYPE_ALIGN (TREE_TYPE (from)));
3899 else
3901 #ifdef POINTERS_EXTEND_UNSIGNED
3902 if (POINTER_TYPE_P (TREE_TYPE (to))
3903 && GET_MODE (to_rtx) != GET_MODE (value))
3904 value = convert_memory_address (GET_MODE (to_rtx), value);
3905 #endif
3906 emit_move_insn (to_rtx, value);
3908 preserve_temp_slots (to_rtx);
3909 free_temp_slots ();
3910 pop_temp_slots ();
3911 return want_value ? to_rtx : NULL_RTX;
3914 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3915 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3917 if (to_rtx == 0)
3919 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3920 if (GET_CODE (to_rtx) == MEM)
3921 set_mem_alias_set (to_rtx, get_alias_set (to));
3924 /* Don't move directly into a return register. */
3925 if (TREE_CODE (to) == RESULT_DECL
3926 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3928 rtx temp;
3930 push_temp_slots ();
3931 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3933 if (GET_CODE (to_rtx) == PARALLEL)
3934 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3935 TYPE_ALIGN (TREE_TYPE (from)));
3936 else
3937 emit_move_insn (to_rtx, temp);
3939 preserve_temp_slots (to_rtx);
3940 free_temp_slots ();
3941 pop_temp_slots ();
3942 return want_value ? to_rtx : NULL_RTX;
3945 /* In case we are returning the contents of an object which overlaps
3946 the place the value is being stored, use a safe function when copying
3947 a value through a pointer into a structure value return block. */
3948 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3949 && current_function_returns_struct
3950 && !current_function_returns_pcc_struct)
3952 rtx from_rtx, size;
3954 push_temp_slots ();
3955 size = expr_size (from);
3956 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3957 EXPAND_MEMORY_USE_DONT);
3959 /* Copy the rights of the bitmap. */
3960 if (current_function_check_memory_usage)
3961 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3962 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3963 XEXP (from_rtx, 0), Pmode,
3964 convert_to_mode (TYPE_MODE (sizetype),
3965 size, TREE_UNSIGNED (sizetype)),
3966 TYPE_MODE (sizetype));
3968 #ifdef TARGET_MEM_FUNCTIONS
3969 emit_library_call (memmove_libfunc, LCT_NORMAL,
3970 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3971 XEXP (from_rtx, 0), Pmode,
3972 convert_to_mode (TYPE_MODE (sizetype),
3973 size, TREE_UNSIGNED (sizetype)),
3974 TYPE_MODE (sizetype));
3975 #else
3976 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3977 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3978 XEXP (to_rtx, 0), Pmode,
3979 convert_to_mode (TYPE_MODE (integer_type_node),
3980 size, TREE_UNSIGNED (integer_type_node)),
3981 TYPE_MODE (integer_type_node));
3982 #endif
3984 preserve_temp_slots (to_rtx);
3985 free_temp_slots ();
3986 pop_temp_slots ();
3987 return want_value ? to_rtx : NULL_RTX;
3990 /* Compute FROM and store the value in the rtx we got. */
3992 push_temp_slots ();
3993 result = store_expr (from, to_rtx, want_value);
3994 preserve_temp_slots (result);
3995 free_temp_slots ();
3996 pop_temp_slots ();
3997 return want_value ? result : NULL_RTX;
4000 /* Generate code for computing expression EXP,
4001 and storing the value into TARGET.
4002 TARGET may contain a QUEUED rtx.
4004 If WANT_VALUE is nonzero, return a copy of the value
4005 not in TARGET, so that we can be sure to use the proper
4006 value in a containing expression even if TARGET has something
4007 else stored in it. If possible, we copy the value through a pseudo
4008 and return that pseudo. Or, if the value is constant, we try to
4009 return the constant. In some cases, we return a pseudo
4010 copied *from* TARGET.
4012 If the mode is BLKmode then we may return TARGET itself.
4013 It turns out that in BLKmode it doesn't cause a problem,
4014 because C has no operators that could combine two different
4015 assignments into the same BLKmode object with different values
4016 with no sequence point. Will other languages need this to
4017 be more thorough?
4019 If WANT_VALUE is 0, we return NULL, to make sure
4020 to catch quickly any cases where the caller uses the value
4021 and fails to set WANT_VALUE. */
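/* Added commentary (illustrative, not from the original sources): in a
   chained assignment such as `a = b = expr', the outer assignment needs
   the value of the inner one, so the inner store is performed with
   WANT_VALUE nonzero and an rtx for the stored value comes back; a plain
   `b = expr;' statement passes WANT_VALUE == 0 and receives NULL_RTX.  */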
4024 store_expr (exp, target, want_value)
4025 tree exp;
4026 rtx target;
4027 int want_value;
4029 rtx temp;
4030 int dont_return_target = 0;
4031 int dont_store_target = 0;
4033 if (TREE_CODE (exp) == COMPOUND_EXPR)
4035 /* Perform first part of compound expression, then assign from second
4036 part. */
4037 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4038 emit_queue ();
4039 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4041 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4043 /* For a conditional expression, get a safe form of the target. Then
4044 test the condition, doing the appropriate assignment on either
4045 side. This avoids the creation of unnecessary temporaries.
4046 For non-BLKmode, it is more efficient not to do this. */
4048 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4050 emit_queue ();
4051 target = protect_from_queue (target, 1);
4053 do_pending_stack_adjust ();
4054 NO_DEFER_POP;
4055 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4056 start_cleanup_deferral ();
4057 store_expr (TREE_OPERAND (exp, 1), target, 0);
4058 end_cleanup_deferral ();
4059 emit_queue ();
4060 emit_jump_insn (gen_jump (lab2));
4061 emit_barrier ();
4062 emit_label (lab1);
4063 start_cleanup_deferral ();
4064 store_expr (TREE_OPERAND (exp, 2), target, 0);
4065 end_cleanup_deferral ();
4066 emit_queue ();
4067 emit_label (lab2);
4068 OK_DEFER_POP;
4070 return want_value ? target : NULL_RTX;
4072 else if (queued_subexp_p (target))
4073 /* If target contains a postincrement, let's not risk
4074 using it as the place to generate the rhs. */
4076 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4078 /* Expand EXP into a new pseudo. */
4079 temp = gen_reg_rtx (GET_MODE (target));
4080 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4082 else
4083 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4085 /* If target is volatile, ANSI requires accessing the value
4086 *from* the target, if it is accessed. So make that happen.
4087 In no case return the target itself. */
4088 if (! MEM_VOLATILE_P (target) && want_value)
4089 dont_return_target = 1;
4091 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4092 && GET_MODE (target) != BLKmode)
4093 /* If target is in memory and caller wants value in a register instead,
4094 arrange that. Pass TARGET as target for expand_expr so that,
4095 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4096 We know expand_expr will not use the target in that case.
4097 Don't do this if TARGET is volatile because we are supposed
4098 to write it and then read it. */
4100 temp = expand_expr (exp, target, GET_MODE (target), 0);
4101 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4103 /* If TEMP is already in the desired TARGET, only copy it from
4104 memory and don't store it there again. */
4105 if (temp == target
4106 || (rtx_equal_p (temp, target)
4107 && ! side_effects_p (temp) && ! side_effects_p (target)))
4108 dont_store_target = 1;
4109 temp = copy_to_reg (temp);
4111 dont_return_target = 1;
4113 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4114 /* If this is a scalar in a register that is stored in a wider mode
4115 than the declared mode, compute the result into its declared mode
4116 and then convert to the wider mode. Our value is the computed
4117 expression. */
4119 /* If we don't want a value, we can do the conversion inside EXP,
4120 which will often result in some optimizations. Do the conversion
4121 in two steps: first change the signedness, if needed, then
4122 the extend. But don't do this if the type of EXP is a subtype
4123 of something else since then the conversion might involve
4124 more than just converting modes. */
4125 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4126 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4128 if (TREE_UNSIGNED (TREE_TYPE (exp))
4129 != SUBREG_PROMOTED_UNSIGNED_P (target))
4130 exp
4131 = convert
4132 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4133 TREE_TYPE (exp)),
4134 exp);
4136 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4137 SUBREG_PROMOTED_UNSIGNED_P (target)),
4138 exp);
4141 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4143 /* If TEMP is a volatile MEM and we want a result value, make
4144 the access now so it gets done only once. Likewise if
4145 it contains TARGET. */
4146 if (GET_CODE (temp) == MEM && want_value
4147 && (MEM_VOLATILE_P (temp)
4148 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4149 temp = copy_to_reg (temp);
4151 /* If TEMP is a VOIDmode constant, use convert_modes to make
4152 sure that we properly convert it. */
4153 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4155 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4156 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4157 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4158 GET_MODE (target), temp,
4159 SUBREG_PROMOTED_UNSIGNED_P (target));
4162 convert_move (SUBREG_REG (target), temp,
4163 SUBREG_PROMOTED_UNSIGNED_P (target));
4165 /* If we promoted a constant, change the mode back down to match
4166 target. Otherwise, the caller might get confused by a result whose
4167 mode is larger than expected. */
4169 if (want_value && GET_MODE (temp) != GET_MODE (target)
4170 && GET_MODE (temp) != VOIDmode)
4172 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4173 SUBREG_PROMOTED_VAR_P (temp) = 1;
4174 SUBREG_PROMOTED_UNSIGNED_P (temp)
4175 = SUBREG_PROMOTED_UNSIGNED_P (target);
4178 return want_value ? temp : NULL_RTX;
4180 else
4182 temp = expand_expr (exp, target, GET_MODE (target), 0);
4183 /* Return TARGET if it's a specified hardware register.
4184 If TARGET is a volatile mem ref, either return TARGET
4185 or return a reg copied *from* TARGET; ANSI requires this.
4187 Otherwise, if TEMP is not TARGET, return TEMP
4188 if it is constant (for efficiency),
4189 or if we really want the correct value. */
4190 if (!(target && GET_CODE (target) == REG
4191 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4192 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4193 && ! rtx_equal_p (temp, target)
4194 && (CONSTANT_P (temp) || want_value))
4195 dont_return_target = 1;
4198 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4199 the same as that of TARGET, adjust the constant. This is needed, for
4200 example, in case it is a CONST_DOUBLE and we want only a word-sized
4201 value. */
4202 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4203 && TREE_CODE (exp) != ERROR_MARK
4204 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4205 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4206 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4208 if (current_function_check_memory_usage
4209 && GET_CODE (target) == MEM
4210 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4212 in_check_memory_usage = 1;
4213 if (GET_CODE (temp) == MEM)
4214 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4215 VOIDmode, 3, XEXP (target, 0), Pmode,
4216 XEXP (temp, 0), Pmode,
4217 expr_size (exp), TYPE_MODE (sizetype));
4218 else
4219 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4220 VOIDmode, 3, XEXP (target, 0), Pmode,
4221 expr_size (exp), TYPE_MODE (sizetype),
4222 GEN_INT (MEMORY_USE_WO),
4223 TYPE_MODE (integer_type_node));
4224 in_check_memory_usage = 0;
4227 /* If value was not generated in the target, store it there.
4228 Convert the value to TARGET's type first if necessary. */
4229 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4230 one or both of them are volatile memory refs, we have to distinguish
4231 two cases:
4232 - expand_expr has used TARGET. In this case, we must not generate
4233 another copy. This can be detected by TARGET being equal according
4234 to == .
4235 - expand_expr has not used TARGET - that means that the source just
4236 happens to have the same RTX form. Since temp will have been created
4237 by expand_expr, it will compare unequal according to == .
4238 We must generate a copy in this case, to reach the correct number
4239 of volatile memory references. */
4241 if ((! rtx_equal_p (temp, target)
4242 || (temp != target && (side_effects_p (temp)
4243 || side_effects_p (target))))
4244 && TREE_CODE (exp) != ERROR_MARK
4245 && ! dont_store_target)
4247 target = protect_from_queue (target, 1);
4248 if (GET_MODE (temp) != GET_MODE (target)
4249 && GET_MODE (temp) != VOIDmode)
4251 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4252 if (dont_return_target)
4254 /* In this case, we will return TEMP,
4255 so make sure it has the proper mode.
4256 But don't forget to store the value into TARGET. */
4257 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4258 emit_move_insn (target, temp);
4260 else
4261 convert_move (target, temp, unsignedp);
4264 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4266 /* Handle copying a string constant into an array.
4267 The string constant may be shorter than the array.
4268 So copy just the string's actual length, and clear the rest. */
4269 rtx size;
4270 rtx addr;
4272 /* Get the size of the data type of the string,
4273 which is actually the size of the target. */
4274 size = expr_size (exp);
4275 if (GET_CODE (size) == CONST_INT
4276 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4277 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4278 else
4280 /* Compute the size of the data to copy from the string. */
4281 tree copy_size
4282 = size_binop (MIN_EXPR,
4283 make_tree (sizetype, size),
4284 size_int (TREE_STRING_LENGTH (exp)));
4285 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4286 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4287 VOIDmode, 0);
4288 rtx label = 0;
4290 /* Copy that much. */
4291 emit_block_move (target, temp, copy_size_rtx,
4292 TYPE_ALIGN (TREE_TYPE (exp)));
4294 /* Figure out how much is left in TARGET that we have to clear.
4295 Do all calculations in ptr_mode. */
4297 addr = XEXP (target, 0);
4298 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4300 if (GET_CODE (copy_size_rtx) == CONST_INT)
4302 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4303 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4304 align = MIN (align,
4305 (unsigned int) (BITS_PER_UNIT
4306 * (INTVAL (copy_size_rtx)
4307 & - INTVAL (copy_size_rtx))));
4309 else
4311 addr = force_reg (ptr_mode, addr);
4312 addr = expand_binop (ptr_mode, add_optab, addr,
4313 copy_size_rtx, NULL_RTX, 0,
4314 OPTAB_LIB_WIDEN);
4316 size = expand_binop (ptr_mode, sub_optab, size,
4317 copy_size_rtx, NULL_RTX, 0,
4318 OPTAB_LIB_WIDEN);
4320 align = BITS_PER_UNIT;
4321 label = gen_label_rtx ();
4322 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4323 GET_MODE (size), 0, 0, label);
4325 align = MIN (align, expr_align (copy_size));
4327 if (size != const0_rtx)
4329 rtx dest = gen_rtx_MEM (BLKmode, addr);
4331 MEM_COPY_ATTRIBUTES (dest, target);
4333 /* Be sure we can write on ADDR. */
4334 in_check_memory_usage = 1;
4335 if (current_function_check_memory_usage)
4336 emit_library_call (chkr_check_addr_libfunc,
4337 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4338 addr, Pmode,
4339 size, TYPE_MODE (sizetype),
4340 GEN_INT (MEMORY_USE_WO),
4341 TYPE_MODE (integer_type_node));
4342 in_check_memory_usage = 0;
4343 clear_storage (dest, size, align);
4346 if (label)
4347 emit_label (label);
4350 /* Handle calls that return values in multiple non-contiguous locations.
4351 The Irix 6 ABI has examples of this. */
4352 else if (GET_CODE (target) == PARALLEL)
4353 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4354 TYPE_ALIGN (TREE_TYPE (exp)));
4355 else if (GET_MODE (temp) == BLKmode)
4356 emit_block_move (target, temp, expr_size (exp),
4357 TYPE_ALIGN (TREE_TYPE (exp)));
4358 else
4359 emit_move_insn (target, temp);
4362 /* If we don't want a value, return NULL_RTX. */
4363 if (! want_value)
4364 return NULL_RTX;
4366 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4367 ??? The latter test doesn't seem to make sense. */
4368 else if (dont_return_target && GET_CODE (temp) != MEM)
4369 return temp;
4371 /* Return TARGET itself if it is a hard register. */
4372 else if (want_value && GET_MODE (target) != BLKmode
4373 && ! (GET_CODE (target) == REG
4374 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4375 return copy_to_reg (target);
4377 else
4378 return target;
4381 /* Return 1 if EXP just contains zeros. */
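/* Illustrative example (added commentary): for a C initializer such as
   `{ 0, 0.0, { 0 } }' every leaf of the CONSTRUCTOR is a zero constant,
   so this returns 1; an initializer with any nonzero leaf, e.g.
   `{ 0, 1 }', makes it return 0.  */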
4383 static int
4384 is_zeros_p (exp)
4385 tree exp;
4387 tree elt;
4389 switch (TREE_CODE (exp))
4391 case CONVERT_EXPR:
4392 case NOP_EXPR:
4393 case NON_LVALUE_EXPR:
4394 return is_zeros_p (TREE_OPERAND (exp, 0));
4396 case INTEGER_CST:
4397 return integer_zerop (exp);
4399 case COMPLEX_CST:
4400 return
4401 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4403 case REAL_CST:
4404 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4406 case CONSTRUCTOR:
4407 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4408 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4409 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4410 if (! is_zeros_p (TREE_VALUE (elt)))
4411 return 0;
4413 return 1;
4415 default:
4416 return 0;
4420 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
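/* For instance (added commentary): a CONSTRUCTOR with element values
   { 7, 0, 0, 0 } has three zero elements out of four, so the
   4 * zeros >= 3 * elts test below succeeds and callers will prefer to
   clear the whole object first and store only the nonzero element.  */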
4422 static int
4423 mostly_zeros_p (exp)
4424 tree exp;
4426 if (TREE_CODE (exp) == CONSTRUCTOR)
4428 int elts = 0, zeros = 0;
4429 tree elt = CONSTRUCTOR_ELTS (exp);
4430 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4432 /* If there are no ranges of true bits, it is all zero. */
4433 return elt == NULL_TREE;
4435 for (; elt; elt = TREE_CHAIN (elt))
4437 /* We do not handle the case where the index is a RANGE_EXPR,
4438 so the statistic will be somewhat inaccurate.
4439 We do make a more accurate count in store_constructor itself,
4440 and since this function is only used for nested array elements,
4441 this should be close enough. */
4442 if (mostly_zeros_p (TREE_VALUE (elt)))
4443 zeros++;
4444 elts++;
4447 return 4 * zeros >= 3 * elts;
4450 return is_zeros_p (exp);
4453 /* Helper function for store_constructor.
4454 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4455 TYPE is the type of the CONSTRUCTOR, not the element type.
4456 ALIGN and CLEARED are as for store_constructor.
4457 ALIAS_SET is the alias set to use for any stores.
4459 This provides a recursive shortcut back to store_constructor when it isn't
4460 necessary to go through store_field. This is so that we can pass through
4461 the cleared field to let store_constructor know that we may not have to
4462 clear a substructure if the outer structure has already been cleared. */
4464 static void
4465 store_constructor_field (target, bitsize, bitpos,
4466 mode, exp, type, align, cleared, alias_set)
4467 rtx target;
4468 unsigned HOST_WIDE_INT bitsize;
4469 HOST_WIDE_INT bitpos;
4470 enum machine_mode mode;
4471 tree exp, type;
4472 unsigned int align;
4473 int cleared;
4474 int alias_set;
4476 if (TREE_CODE (exp) == CONSTRUCTOR
4477 && bitpos % BITS_PER_UNIT == 0
4478 /* If we have a non-zero bitpos for a register target, then we just
4479 let store_field do the bitfield handling. This is unlikely to
4480 generate unnecessary clear instructions anyway. */
4481 && (bitpos == 0 || GET_CODE (target) == MEM))
4483 if (bitpos != 0)
4484 target
4485 = adjust_address (target,
4486 GET_MODE (target) == BLKmode
4487 || 0 != (bitpos
4488 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4489 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4492 /* Show the alignment may no longer be what it was and update the alias
4493 set, if required. */
4494 if (bitpos != 0)
4495 align = MIN (align, (unsigned int) bitpos & - bitpos);
4496 if (GET_CODE (target) == MEM)
4497 set_mem_alias_set (target, alias_set);
4499 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4501 else
4502 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4503 int_size_in_bytes (type), alias_set);
4506 /* Store the value of constructor EXP into the rtx TARGET.
4507 TARGET is either a REG or a MEM.
4508 ALIGN is the maximum known alignment for TARGET.
4509 CLEARED is true if TARGET is known to have been zeroed.
4510 SIZE is the number of bytes of TARGET we are allowed to modify: this
4511 may not be the same as the size of EXP if we are assigning to a field
4512 which has been packed to exclude padding bits. */
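/* Illustrative example (added commentary): for
   `struct { int a; struct { int b, c; } in; } x = { 1, { 0, 3 } };'
   the outer CONSTRUCTOR is walked field by field; the nested CONSTRUCTOR
   for `x.in' reaches store_constructor again through
   store_constructor_field, and once the target is known to be cleared,
   zero-valued elements are skipped entirely.  */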
4514 static void
4515 store_constructor (exp, target, align, cleared, size)
4516 tree exp;
4517 rtx target;
4518 unsigned int align;
4519 int cleared;
4520 HOST_WIDE_INT size;
4522 tree type = TREE_TYPE (exp);
4523 #ifdef WORD_REGISTER_OPERATIONS
4524 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4525 #endif
4527 /* We know our target cannot conflict, since safe_from_p has been called. */
4528 #if 0
4529 /* Don't try copying piece by piece into a hard register
4530 since that is vulnerable to being clobbered by EXP.
4531 Instead, construct in a pseudo register and then copy it all. */
4532 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4534 rtx temp = gen_reg_rtx (GET_MODE (target));
4535 store_constructor (exp, temp, align, cleared, size);
4536 emit_move_insn (target, temp);
4537 return;
4539 #endif
4541 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4542 || TREE_CODE (type) == QUAL_UNION_TYPE)
4544 tree elt;
4546 /* Inform later passes that the whole union value is dead. */
4547 if ((TREE_CODE (type) == UNION_TYPE
4548 || TREE_CODE (type) == QUAL_UNION_TYPE)
4549 && ! cleared)
4551 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4553 /* If the constructor is empty, clear the union. */
4554 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4555 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4558 /* If we are building a static constructor into a register,
4559 set the initial value as zero so we can fold the value into
4560 a constant. But if more than one register is involved,
4561 this probably loses. */
4562 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4563 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4565 if (! cleared)
4566 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4568 cleared = 1;
4571 /* If the constructor has fewer fields than the structure
4572 or if we are initializing the structure to mostly zeros,
4573 clear the whole structure first. Don't do this if TARGET is a
4574 register whose mode size isn't equal to SIZE since clear_storage
4575 can't handle this case. */
4576 else if (size > 0
4577 && ((list_length (CONSTRUCTOR_ELTS (exp))
4578 != fields_length (type))
4579 || mostly_zeros_p (exp))
4580 && (GET_CODE (target) != REG
4581 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4583 if (! cleared)
4584 clear_storage (target, GEN_INT (size), align);
4586 cleared = 1;
4588 else if (! cleared)
4589 /* Inform later passes that the old value is dead. */
4590 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4592 /* Store each element of the constructor into
4593 the corresponding field of TARGET. */
4595 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4597 tree field = TREE_PURPOSE (elt);
4598 #ifdef WORD_REGISTER_OPERATIONS
4599 tree value = TREE_VALUE (elt);
4600 #endif
4601 enum machine_mode mode;
4602 HOST_WIDE_INT bitsize;
4603 HOST_WIDE_INT bitpos = 0;
4604 int unsignedp;
4605 tree offset;
4606 rtx to_rtx = target;
4608 /* Just ignore missing fields.
4609 We cleared the whole structure, above,
4610 if any fields are missing. */
4611 if (field == 0)
4612 continue;
4614 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4615 continue;
4617 if (host_integerp (DECL_SIZE (field), 1))
4618 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4619 else
4620 bitsize = -1;
4622 unsignedp = TREE_UNSIGNED (field);
4623 mode = DECL_MODE (field);
4624 if (DECL_BIT_FIELD (field))
4625 mode = VOIDmode;
4627 offset = DECL_FIELD_OFFSET (field);
4628 if (host_integerp (offset, 0)
4629 && host_integerp (bit_position (field), 0))
4631 bitpos = int_bit_position (field);
4632 offset = 0;
4634 else
4635 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4637 if (offset)
4639 rtx offset_rtx;
4641 if (contains_placeholder_p (offset))
4642 offset = build (WITH_RECORD_EXPR, sizetype,
4643 offset, make_tree (TREE_TYPE (exp), target));
4645 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4646 if (GET_CODE (to_rtx) != MEM)
4647 abort ();
4649 if (GET_MODE (offset_rtx) != ptr_mode)
4650 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4652 #ifdef POINTERS_EXTEND_UNSIGNED
4653 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4654 #endif
4656 to_rtx = offset_address (to_rtx, offset_rtx,
4657 highest_pow2_factor (offset));
4659 align = DECL_OFFSET_ALIGN (field);
4662 if (TREE_READONLY (field))
4664 if (GET_CODE (to_rtx) == MEM)
4665 to_rtx = copy_rtx (to_rtx);
4667 RTX_UNCHANGING_P (to_rtx) = 1;
4670 #ifdef WORD_REGISTER_OPERATIONS
4671 /* If this initializes a field that is smaller than a word, at the
4672 start of a word, try to widen it to a full word.
4673 This special case allows us to output C++ member function
4674 initializations in a form that the optimizers can understand. */
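/* Sketch of the transformation below (added commentary): a constant
   initializer for a sub-word field that starts on a word boundary,
   e.g. a 16-bit field set to 5 in a register target, is converted to a
   full-word integer (shifted up on big-endian targets) and then stored
   with word_mode, so later passes see an ordinary word-sized store.  */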
4675 if (GET_CODE (target) == REG
4676 && bitsize < BITS_PER_WORD
4677 && bitpos % BITS_PER_WORD == 0
4678 && GET_MODE_CLASS (mode) == MODE_INT
4679 && TREE_CODE (value) == INTEGER_CST
4680 && exp_size >= 0
4681 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4683 tree type = TREE_TYPE (value);
4684 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4686 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4687 value = convert (type, value);
4689 if (BYTES_BIG_ENDIAN)
4690 value
4691 = fold (build (LSHIFT_EXPR, type, value,
4692 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4693 bitsize = BITS_PER_WORD;
4694 mode = word_mode;
4696 #endif
4697 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4698 TREE_VALUE (elt), type, align, cleared,
4699 (DECL_NONADDRESSABLE_P (field)
4700 && GET_CODE (to_rtx) == MEM)
4701 ? MEM_ALIAS_SET (to_rtx)
4702 : get_alias_set (TREE_TYPE (field)));
4705 else if (TREE_CODE (type) == ARRAY_TYPE)
4707 tree elt;
4708 int i;
4709 int need_to_clear;
4710 tree domain = TYPE_DOMAIN (type);
4711 tree elttype = TREE_TYPE (type);
4712 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4713 && TYPE_MAX_VALUE (domain)
4714 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4715 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4716 HOST_WIDE_INT minelt = 0;
4717 HOST_WIDE_INT maxelt = 0;
4719 /* If we have constant bounds for the range of the type, get them. */
4720 if (const_bounds_p)
4722 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4723 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4726 /* If the constructor has fewer elements than the array,
4727 clear the whole array first. Similarly if this is
4728 a static constructor of a non-BLKmode object. */
4729 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4730 need_to_clear = 1;
4731 else
4733 HOST_WIDE_INT count = 0, zero_count = 0;
4734 need_to_clear = ! const_bounds_p;
4736 /* This loop is a more accurate version of the loop in
4737 mostly_zeros_p (it handles RANGE_EXPR in an index).
4738 It is also needed to check for missing elements. */
4739 for (elt = CONSTRUCTOR_ELTS (exp);
4740 elt != NULL_TREE && ! need_to_clear;
4741 elt = TREE_CHAIN (elt))
4743 tree index = TREE_PURPOSE (elt);
4744 HOST_WIDE_INT this_node_count;
4746 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4748 tree lo_index = TREE_OPERAND (index, 0);
4749 tree hi_index = TREE_OPERAND (index, 1);
4751 if (! host_integerp (lo_index, 1)
4752 || ! host_integerp (hi_index, 1))
4754 need_to_clear = 1;
4755 break;
4758 this_node_count = (tree_low_cst (hi_index, 1)
4759 - tree_low_cst (lo_index, 1) + 1);
4761 else
4762 this_node_count = 1;
4764 count += this_node_count;
4765 if (mostly_zeros_p (TREE_VALUE (elt)))
4766 zero_count += this_node_count;
4769 /* Clear the entire array first if there are any missing elements,
4770 or if the incidence of zero elements is >= 75%. */
4771 if (! need_to_clear
4772 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4773 need_to_clear = 1;
4776 if (need_to_clear && size > 0)
4778 if (! cleared)
4779 clear_storage (target, GEN_INT (size), align);
4780 cleared = 1;
4782 else if (REG_P (target))
4783 /* Inform later passes that the old value is dead. */
4784 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4786 /* Store each element of the constructor into
4787 the corresponding element of TARGET, determined
4788 by counting the elements. */
4789 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4790 elt;
4791 elt = TREE_CHAIN (elt), i++)
4793 enum machine_mode mode;
4794 HOST_WIDE_INT bitsize;
4795 HOST_WIDE_INT bitpos;
4796 int unsignedp;
4797 tree value = TREE_VALUE (elt);
4798 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4799 tree index = TREE_PURPOSE (elt);
4800 rtx xtarget = target;
4802 if (cleared && is_zeros_p (value))
4803 continue;
4805 unsignedp = TREE_UNSIGNED (elttype);
4806 mode = TYPE_MODE (elttype);
4807 if (mode == BLKmode)
4808 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4809 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4810 : -1);
4811 else
4812 bitsize = GET_MODE_BITSIZE (mode);
4814 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4816 tree lo_index = TREE_OPERAND (index, 0);
4817 tree hi_index = TREE_OPERAND (index, 1);
4818 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4819 struct nesting *loop;
4820 HOST_WIDE_INT lo, hi, count;
4821 tree position;
4823 /* If the range is constant and "small", unroll the loop. */
4824 if (const_bounds_p
4825 && host_integerp (lo_index, 0)
4826 && host_integerp (hi_index, 0)
4827 && (lo = tree_low_cst (lo_index, 0),
4828 hi = tree_low_cst (hi_index, 0),
4829 count = hi - lo + 1,
4830 (GET_CODE (target) != MEM
4831 || count <= 2
4832 || (host_integerp (TYPE_SIZE (elttype), 1)
4833 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4834 <= 40 * 8)))))
4836 lo -= minelt; hi -= minelt;
4837 for (; lo <= hi; lo++)
4839 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4840 store_constructor_field
4841 (target, bitsize, bitpos, mode, value, type, align,
4842 cleared,
4843 TYPE_NONALIASED_COMPONENT (type)
4844 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4847 else
4849 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4850 loop_top = gen_label_rtx ();
4851 loop_end = gen_label_rtx ();
4853 unsignedp = TREE_UNSIGNED (domain);
4855 index = build_decl (VAR_DECL, NULL_TREE, domain);
4857 index_r
4858 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4859 &unsignedp, 0));
4860 SET_DECL_RTL (index, index_r);
4861 if (TREE_CODE (value) == SAVE_EXPR
4862 && SAVE_EXPR_RTL (value) == 0)
4864 /* Make sure value gets expanded once before the
4865 loop. */
4866 expand_expr (value, const0_rtx, VOIDmode, 0);
4867 emit_queue ();
4869 store_expr (lo_index, index_r, 0);
4870 loop = expand_start_loop (0);
4872 /* Assign value to element index. */
4873 position
4874 = convert (ssizetype,
4875 fold (build (MINUS_EXPR, TREE_TYPE (index),
4876 index, TYPE_MIN_VALUE (domain))));
4877 position = size_binop (MULT_EXPR, position,
4878 convert (ssizetype,
4879 TYPE_SIZE_UNIT (elttype)));
4881 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4882 xtarget = offset_address (target, pos_rtx,
4883 highest_pow2_factor (position));
4884 xtarget = adjust_address (xtarget, mode, 0);
4885 if (TREE_CODE (value) == CONSTRUCTOR)
4886 store_constructor (value, xtarget, align, cleared,
4887 bitsize / BITS_PER_UNIT);
4888 else
4889 store_expr (value, xtarget, 0);
4891 expand_exit_loop_if_false (loop,
4892 build (LT_EXPR, integer_type_node,
4893 index, hi_index));
4895 expand_increment (build (PREINCREMENT_EXPR,
4896 TREE_TYPE (index),
4897 index, integer_one_node), 0, 0);
4898 expand_end_loop ();
4899 emit_label (loop_end);
4902 else if ((index != 0 && ! host_integerp (index, 0))
4903 || ! host_integerp (TYPE_SIZE (elttype), 1))
4905 tree position;
4907 if (index == 0)
4908 index = ssize_int (1);
4910 if (minelt)
4911 index = convert (ssizetype,
4912 fold (build (MINUS_EXPR, index,
4913 TYPE_MIN_VALUE (domain))));
4915 position = size_binop (MULT_EXPR, index,
4916 convert (ssizetype,
4917 TYPE_SIZE_UNIT (elttype)));
4918 xtarget = offset_address (target,
4919 expand_expr (position, 0, VOIDmode, 0),
4920 highest_pow2_factor (position));
4921 xtarget = adjust_address (xtarget, mode, 0);
4922 store_expr (value, xtarget, 0);
4924 else
4926 if (index != 0)
4927 bitpos = ((tree_low_cst (index, 0) - minelt)
4928 * tree_low_cst (TYPE_SIZE (elttype), 1));
4929 else
4930 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4932 store_constructor_field (target, bitsize, bitpos, mode, value,
4933 type, align, cleared,
4934 TYPE_NONALIASED_COMPONENT (type)
4935 && GET_CODE (target) == MEM
4936 ? MEM_ALIAS_SET (target) :
4937 get_alias_set (elttype));
4943 /* Set constructor assignments. */
4944 else if (TREE_CODE (type) == SET_TYPE)
4946 tree elt = CONSTRUCTOR_ELTS (exp);
4947 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4948 tree domain = TYPE_DOMAIN (type);
4949 tree domain_min, domain_max, bitlength;
4951 /* The default implementation strategy is to extract the constant
4952 parts of the constructor, use that to initialize the target,
4953 and then "or" in whatever non-constant ranges we need in addition.
4955 If a large set is all zero or all ones, it is
4956 probably better to set it using memset (if available) or bzero.
4957 Also, if a large set has just a single range, it may also be
4958 better to first clear the whole set (using
4959 bzero/memset) and then set the bits we want. */
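/* Illustrative example (added commentary): a set-typed constructor whose
   elements are the constant ranges [1] and [3..5] is assembled word by
   word from the bit image below and stored directly; ranges that are not
   folded into that constant image are handled by the loop further down,
   which uses memset for byte-aligned constant ranges and the `__setbits'
   library routine otherwise.  */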
4961 /* Check for all zeros. */
4962 if (elt == NULL_TREE && size > 0)
4964 if (!cleared)
4965 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4966 return;
4969 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4970 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4971 bitlength = size_binop (PLUS_EXPR,
4972 size_diffop (domain_max, domain_min),
4973 ssize_int (1));
4975 nbits = tree_low_cst (bitlength, 1);
4977 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4978 are "complicated" (more than one range), initialize (the
4979 constant parts) by copying from a constant. */
4980 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4981 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4983 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4984 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4985 char *bit_buffer = (char *) alloca (nbits);
4986 HOST_WIDE_INT word = 0;
4987 unsigned int bit_pos = 0;
4988 unsigned int ibit = 0;
4989 unsigned int offset = 0; /* In bytes from beginning of set. */
4991 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4992 for (;;)
4994 if (bit_buffer[ibit])
4996 if (BYTES_BIG_ENDIAN)
4997 word |= (1 << (set_word_size - 1 - bit_pos));
4998 else
4999 word |= 1 << bit_pos;
5002 bit_pos++; ibit++;
5003 if (bit_pos >= set_word_size || ibit == nbits)
5005 if (word != 0 || ! cleared)
5007 rtx datum = GEN_INT (word);
5008 rtx to_rtx;
5010 /* The assumption here is that it is safe to use
5011 XEXP if the set is multi-word, but not if
5012 it's single-word. */
5013 if (GET_CODE (target) == MEM)
5014 to_rtx = adjust_address (target, mode, offset);
5015 else if (offset == 0)
5016 to_rtx = target;
5017 else
5018 abort ();
5019 emit_move_insn (to_rtx, datum);
5022 if (ibit == nbits)
5023 break;
5024 word = 0;
5025 bit_pos = 0;
5026 offset += set_word_size / BITS_PER_UNIT;
5030 else if (!cleared)
5031 /* Don't bother clearing storage if the set is all ones. */
5032 if (TREE_CHAIN (elt) != NULL_TREE
5033 || (TREE_PURPOSE (elt) == NULL_TREE
5034 ? nbits != 1
5035 : (! host_integerp (TREE_VALUE (elt), 0)
5036 || ! host_integerp (TREE_PURPOSE (elt), 0)
5037 || (tree_low_cst (TREE_VALUE (elt), 0)
5038 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5039 != (HOST_WIDE_INT) nbits))))
5040 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5042 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5044 /* Start of range of element or NULL. */
5045 tree startbit = TREE_PURPOSE (elt);
5046 /* End of range of element, or element value. */
5047 tree endbit = TREE_VALUE (elt);
5048 #ifdef TARGET_MEM_FUNCTIONS
5049 HOST_WIDE_INT startb, endb;
5050 #endif
5051 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5053 bitlength_rtx = expand_expr (bitlength,
5054 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5056 /* Handle non-range tuple element like [ expr ]. */
5057 if (startbit == NULL_TREE)
5059 startbit = save_expr (endbit);
5060 endbit = startbit;
5063 startbit = convert (sizetype, startbit);
5064 endbit = convert (sizetype, endbit);
5065 if (! integer_zerop (domain_min))
5067 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5068 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5070 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5071 EXPAND_CONST_ADDRESS);
5072 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5073 EXPAND_CONST_ADDRESS);
5075 if (REG_P (target))
5077 targetx
5078 = assign_temp
5079 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5080 TYPE_QUAL_CONST)),
5081 0, 1, 1);
5082 emit_move_insn (targetx, target);
5085 else if (GET_CODE (target) == MEM)
5086 targetx = target;
5087 else
5088 abort ();
5090 #ifdef TARGET_MEM_FUNCTIONS
5091 /* Optimization: If startbit and endbit are
5092 constants divisible by BITS_PER_UNIT,
5093 call memset instead. */
5094 if (TREE_CODE (startbit) == INTEGER_CST
5095 && TREE_CODE (endbit) == INTEGER_CST
5096 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5097 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5099 emit_library_call (memset_libfunc, LCT_NORMAL,
5100 VOIDmode, 3,
5101 plus_constant (XEXP (targetx, 0),
5102 startb / BITS_PER_UNIT),
5103 Pmode,
5104 constm1_rtx, TYPE_MODE (integer_type_node),
5105 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5106 TYPE_MODE (sizetype));
5108 else
5109 #endif
5110 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5111 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5112 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5113 startbit_rtx, TYPE_MODE (sizetype),
5114 endbit_rtx, TYPE_MODE (sizetype));
5116 if (REG_P (target))
5117 emit_move_insn (target, targetx);
5121 else
5122 abort ();
5125 /* Store the value of EXP (an expression tree)
5126 into a subfield of TARGET which has mode MODE and occupies
5127 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5128 If MODE is VOIDmode, it means that we are storing into a bit-field.
5130 If VALUE_MODE is VOIDmode, return nothing in particular.
5131 UNSIGNEDP is not used in this case.
5133 Otherwise, return an rtx for the value stored. This rtx
5134 has mode VALUE_MODE if that is convenient to do.
5135 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5137 ALIGN is the alignment that TARGET is known to have.
5138 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5140 ALIAS_SET is the alias set for the destination. This value will
5141 (in general) be different from that for TARGET, since TARGET is a
5142 reference to the containing structure. */
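/* Illustrative example (added commentary): storing into a 3-bit
   bit-field member, as in `struct { int f : 3; } *p; ... p->f = v;',
   reaches this function with MODE == VOIDmode and BITSIZE == 3 and goes
   through store_bit_field; an ordinary aligned `int' member instead gets
   a plain memory reference built with adjust_address and is stored via
   store_expr.  */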
5144 static rtx
5145 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5146 unsignedp, align, total_size, alias_set)
5147 rtx target;
5148 HOST_WIDE_INT bitsize;
5149 HOST_WIDE_INT bitpos;
5150 enum machine_mode mode;
5151 tree exp;
5152 enum machine_mode value_mode;
5153 int unsignedp;
5154 unsigned int align;
5155 HOST_WIDE_INT total_size;
5156 int alias_set;
5158 HOST_WIDE_INT width_mask = 0;
5160 if (TREE_CODE (exp) == ERROR_MARK)
5161 return const0_rtx;
5163 /* If we have nothing to store, do nothing unless the expression has
5164 side-effects. */
5165 if (bitsize == 0)
5166 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5168 if (bitsize < HOST_BITS_PER_WIDE_INT)
5169 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5171 /* If we are storing into an unaligned field of an aligned union that is
5172 in a register, we may have the mode of TARGET being an integer mode but
5173 MODE == BLKmode. In that case, get an aligned object whose size and
5174 alignment are the same as TARGET and store TARGET into it (we can avoid
5175 the store if the field being stored is the entire width of TARGET). Then
5176 call ourselves recursively to store the field into a BLKmode version of
5177 that object. Finally, load from the object into TARGET. This is not
5178 very efficient in general, but should only be slightly more expensive
5179 than the otherwise-required unaligned accesses. Perhaps this can be
5180 cleaned up later. */
5182 if (mode == BLKmode
5183 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5185 rtx object
5186 = assign_temp
5187 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5188 TYPE_QUAL_CONST),
5189 0, 1, 1);
5190 rtx blk_object = copy_rtx (object);
5192 PUT_MODE (blk_object, BLKmode);
5194 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5195 emit_move_insn (object, target);
5197 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5198 align, total_size, alias_set);
5200 /* Even though we aren't returning target, we need to
5201 give it the updated value. */
5202 emit_move_insn (target, object);
5204 return blk_object;
5207 if (GET_CODE (target) == CONCAT)
5209 /* We're storing into a struct containing a single __complex. */
5211 if (bitpos != 0)
5212 abort ();
5213 return store_expr (exp, target, 0);
5216 /* If the structure is in a register or if the component
5217 is a bit field, we cannot use addressing to access it.
5218 Use bit-field techniques or SUBREG to store in it. */
5220 if (mode == VOIDmode
5221 || (mode != BLKmode && ! direct_store[(int) mode]
5222 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5223 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5224 || GET_CODE (target) == REG
5225 || GET_CODE (target) == SUBREG
5226 /* If the field isn't aligned enough to store as an ordinary memref,
5227 store it as a bit field. */
5228 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5229 && (align < GET_MODE_ALIGNMENT (mode)
5230 || bitpos % GET_MODE_ALIGNMENT (mode)))
5231 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5232 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5233 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5234 /* If the RHS and field are a constant size and the size of the
5235 RHS isn't the same size as the bitfield, we must use bitfield
5236 operations. */
5237 || (bitsize >= 0
5238 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5239 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5241 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5243 /* If BITSIZE is narrower than the size of the type of EXP
5244 we will be narrowing TEMP. Normally, what's wanted are the
5245 low-order bits. However, if EXP's type is a record and this is
5246 a big-endian machine, we want the upper BITSIZE bits. */
5247 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5248 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5249 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5250 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5251 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5252 - bitsize),
5253 temp, 1);
5255 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5256 MODE. */
5257 if (mode != VOIDmode && mode != BLKmode
5258 && mode != TYPE_MODE (TREE_TYPE (exp)))
5259 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5261 /* If the modes of TARGET and TEMP are both BLKmode, both
5262 must be in memory and BITPOS must be aligned on a byte
5263 boundary. If so, we simply do a block copy. */
5264 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5266 unsigned int exp_align = expr_align (exp);
5268 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5269 || bitpos % BITS_PER_UNIT != 0)
5270 abort ();
5272 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5274 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5275 align = MIN (exp_align, align);
5277 /* Find an alignment that is consistent with the bit position. */
5278 while ((bitpos % align) != 0)
5279 align >>= 1;
5281 emit_block_move (target, temp,
5282 bitsize == -1 ? expr_size (exp)
5283 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5284 / BITS_PER_UNIT),
5285 align);
5287 return value_mode == VOIDmode ? const0_rtx : target;
5290 /* Store the value in the bitfield. */
5291 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5292 if (value_mode != VOIDmode)
5294 /* The caller wants an rtx for the value. */
5295 /* If possible, avoid refetching from the bitfield itself. */
5296 if (width_mask != 0
5297 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5299 tree count;
5300 enum machine_mode tmode;
5302 if (unsignedp)
5303 return expand_and (temp,
5304 GEN_INT
5305 (trunc_int_for_mode
5306 (width_mask,
5307 GET_MODE (temp) == VOIDmode
5308 ? value_mode
5309 : GET_MODE (temp))), NULL_RTX);
5310 tmode = GET_MODE (temp);
5311 if (tmode == VOIDmode)
5312 tmode = value_mode;
5313 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5314 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5315 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5317 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5318 NULL_RTX, value_mode, 0, align,
5319 total_size);
5321 return const0_rtx;
5323 else
5325 rtx addr = XEXP (target, 0);
5326 rtx to_rtx;
5328 /* If a value is wanted, it must be the lhs;
5329 so make the address stable for multiple use. */
5331 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5332 && ! CONSTANT_ADDRESS_P (addr)
5333 /* A frame-pointer reference is already stable. */
5334 && ! (GET_CODE (addr) == PLUS
5335 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5336 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5337 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5338 target = replace_equiv_address (target, copy_to_reg (addr));
5340 /* Now build a reference to just the desired component. */
5342 to_rtx = copy_rtx (adjust_address (target, mode,
5343 bitpos / BITS_PER_UNIT));
5345 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5346 /* If the address of the structure varies, then it might be on
5347 the stack. And, stack slots may be shared across scopes.
5348 So, two different structures, of different types, can end up
5349 at the same location. We will give the structures alias set
5350 zero; here we must be careful not to give non-zero alias sets
5351 to their fields. */
5352 set_mem_alias_set (to_rtx,
5353 rtx_varies_p (addr, /*for_alias=*/0)
5354 ? 0 : alias_set);
5356 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5360 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5361 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5362 codes and find the ultimate containing object, which we return.
5364 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5365 bit position, and *PUNSIGNEDP to the signedness of the field.
5366 If the position of the field is variable, we store a tree
5367 giving the variable offset (in units) in *POFFSET.
5368 This offset is in addition to the bit position.
5369 If the position is not variable, we store 0 in *POFFSET.
5370 We set *PALIGNMENT to the alignment of the address that will be
5371 computed. This is the alignment of the thing we return if *POFFSET
5372 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5374 If any of the extraction expressions is volatile,
5375 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5377 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5378 is a mode that can be used to access the field. In that case, *PBITSIZE
5379 is redundant.
5381 If the field describes a variable-sized object, *PMODE is set to
5382 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5383 this case, but the address of the object can be found. */
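/* Illustrative example (added commentary): for `s.a[i].f' this peels the
   COMPONENT_REF and ARRAY_REF and returns the tree for `s'; *PBITPOS
   receives the accumulated constant bit offset, while the variable byte
   offset, roughly `i * sizeof (s.a[0])' plus the fields' byte offsets,
   is returned as a tree in *POFFSET.  */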
5385 tree
5386 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5387 punsignedp, pvolatilep, palignment)
5388 tree exp;
5389 HOST_WIDE_INT *pbitsize;
5390 HOST_WIDE_INT *pbitpos;
5391 tree *poffset;
5392 enum machine_mode *pmode;
5393 int *punsignedp;
5394 int *pvolatilep;
5395 unsigned int *palignment;
5397 tree size_tree = 0;
5398 enum machine_mode mode = VOIDmode;
5399 tree offset = size_zero_node;
5400 tree bit_offset = bitsize_zero_node;
5401 unsigned int alignment = BIGGEST_ALIGNMENT;
5402 tree placeholder_ptr = 0;
5403 tree tem;
5405 /* First get the mode, signedness, and size. We do this from just the
5406 outermost expression. */
5407 if (TREE_CODE (exp) == COMPONENT_REF)
5409 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5410 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5411 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5413 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5415 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5417 size_tree = TREE_OPERAND (exp, 1);
5418 *punsignedp = TREE_UNSIGNED (exp);
5420 else
5422 mode = TYPE_MODE (TREE_TYPE (exp));
5423 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5425 if (mode == BLKmode)
5426 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5427 else
5428 *pbitsize = GET_MODE_BITSIZE (mode);
5431 if (size_tree != 0)
5433 if (! host_integerp (size_tree, 1))
5434 mode = BLKmode, *pbitsize = -1;
5435 else
5436 *pbitsize = tree_low_cst (size_tree, 1);
5439 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5440 and find the ultimate containing object. */
5441 while (1)
5443 if (TREE_CODE (exp) == BIT_FIELD_REF)
5444 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5445 else if (TREE_CODE (exp) == COMPONENT_REF)
5447 tree field = TREE_OPERAND (exp, 1);
5448 tree this_offset = DECL_FIELD_OFFSET (field);
5450 /* If this field hasn't been filled in yet, don't go
5451 past it. This should only happen when folding expressions
5452 made during type construction. */
5453 if (this_offset == 0)
5454 break;
5455 else if (! TREE_CONSTANT (this_offset)
5456 && contains_placeholder_p (this_offset))
5457 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5459 offset = size_binop (PLUS_EXPR, offset, this_offset);
5460 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5461 DECL_FIELD_BIT_OFFSET (field));
5463 if (! host_integerp (offset, 0))
5464 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5467 else if (TREE_CODE (exp) == ARRAY_REF
5468 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5470 tree index = TREE_OPERAND (exp, 1);
5471 tree array = TREE_OPERAND (exp, 0);
5472 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5473 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5474 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5476 /* We assume all arrays have sizes that are a multiple of a byte.
5477 First subtract the lower bound, if any, in the type of the
5478 index, then convert to sizetype and multiply by the size of the
5479 array element. */
5480 if (low_bound != 0 && ! integer_zerop (low_bound))
5481 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5482 index, low_bound));
5484 /* If the index has a self-referential type, pass it to a
5485 WITH_RECORD_EXPR; if the component size does, pass our
5486 component to one. */
5487 if (! TREE_CONSTANT (index)
5488 && contains_placeholder_p (index))
5489 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5490 if (! TREE_CONSTANT (unit_size)
5491 && contains_placeholder_p (unit_size))
5492 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5494 offset = size_binop (PLUS_EXPR, offset,
5495 size_binop (MULT_EXPR,
5496 convert (sizetype, index),
5497 unit_size));
5500 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5502 exp = find_placeholder (exp, &placeholder_ptr);
5503 continue;
5505 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5506 && ! ((TREE_CODE (exp) == NOP_EXPR
5507 || TREE_CODE (exp) == CONVERT_EXPR)
5508 && (TYPE_MODE (TREE_TYPE (exp))
5509 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5510 break;
5512 /* If any reference in the chain is volatile, the effect is volatile. */
5513 if (TREE_THIS_VOLATILE (exp))
5514 *pvolatilep = 1;
5516 /* If the offset is non-constant already, then we can't assume any
5517 alignment more than the alignment here. */
5518 if (! TREE_CONSTANT (offset))
5519 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5521 exp = TREE_OPERAND (exp, 0);
5524 if (DECL_P (exp))
5525 alignment = MIN (alignment, DECL_ALIGN (exp));
5526 else if (TREE_TYPE (exp) != 0)
5527 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5529 /* If OFFSET is constant, see if we can return the whole thing as a
5530 constant bit position. Otherwise, split it up. */
5531 if (host_integerp (offset, 0)
5532 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5533 bitsize_unit_node))
5534 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5535 && host_integerp (tem, 0))
5536 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5537 else
5538 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5540 *pmode = mode;
5541 *palignment = alignment;
5542 return exp;
5545 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5547 static enum memory_use_mode
5548 get_memory_usage_from_modifier (modifier)
5549 enum expand_modifier modifier;
5551 switch (modifier)
5553 case EXPAND_NORMAL:
5554 case EXPAND_SUM:
5555 return MEMORY_USE_RO;
5556 break;
5557 case EXPAND_MEMORY_USE_WO:
5558 return MEMORY_USE_WO;
5559 break;
5560 case EXPAND_MEMORY_USE_RW:
5561 return MEMORY_USE_RW;
5562 break;
5563 case EXPAND_MEMORY_USE_DONT:
5564 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5565 MEMORY_USE_DONT, because they are modifiers to a call of
5566 expand_expr in the ADDR_EXPR case of expand_expr. */
5567 case EXPAND_CONST_ADDRESS:
5568 case EXPAND_INITIALIZER:
5569 return MEMORY_USE_DONT;
5570 case EXPAND_MEMORY_USE_BAD:
5571 default:
5572 abort ();
5576 /* Given an rtx VALUE that may contain additions and multiplications, return
5577 an equivalent value that just refers to a register, memory, or constant.
5578 This is done by generating instructions to perform the arithmetic and
5579 returning a pseudo-register containing the value.
5581 The returned value may be a REG, SUBREG, MEM or constant. */
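/* Illustrative example (added commentary): handed something like
   (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4))),
   force_operand emits the multiply and the add as real insns and returns
   a pseudo (or TARGET, if one was supplied) holding the result, so the
   caller can use it as an ordinary operand.  */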
5584 force_operand (value, target)
5585 rtx value, target;
5587 optab binoptab = 0;
5588 /* Use a temporary to force order of execution of calls to
5589 `force_operand'. */
5590 rtx tmp;
5591 rtx op2;
5592 /* Use subtarget as the target for operand 0 of a binary operation. */
5593 rtx subtarget = get_subtarget (target);
5595 /* Check for a PIC address load. */
5596 if (flag_pic
5597 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5598 && XEXP (value, 0) == pic_offset_table_rtx
5599 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5600 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5601 || GET_CODE (XEXP (value, 1)) == CONST))
5603 if (!subtarget)
5604 subtarget = gen_reg_rtx (GET_MODE (value));
5605 emit_move_insn (subtarget, value);
5606 return subtarget;
5609 if (GET_CODE (value) == PLUS)
5610 binoptab = add_optab;
5611 else if (GET_CODE (value) == MINUS)
5612 binoptab = sub_optab;
5613 else if (GET_CODE (value) == MULT)
5615 op2 = XEXP (value, 1);
5616 if (!CONSTANT_P (op2)
5617 && !(GET_CODE (op2) == REG && op2 != subtarget))
5618 subtarget = 0;
5619 tmp = force_operand (XEXP (value, 0), subtarget);
5620 return expand_mult (GET_MODE (value), tmp,
5621 force_operand (op2, NULL_RTX),
5622 target, 1);
5625 if (binoptab)
5627 op2 = XEXP (value, 1);
5628 if (!CONSTANT_P (op2)
5629 && !(GET_CODE (op2) == REG && op2 != subtarget))
5630 subtarget = 0;
5631 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5633 binoptab = add_optab;
5634 op2 = negate_rtx (GET_MODE (value), op2);
5637 /* Check for an addition with OP2 a constant integer and our first
5638 operand a PLUS of a virtual register and something else. In that
5639 case, we want to emit the sum of the virtual register and the
5640 constant first and then add the other value. This allows virtual
5641 register instantiation to simply modify the constant rather than
5642 creating another one around this addition. */
5643 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5644 && GET_CODE (XEXP (value, 0)) == PLUS
5645 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5646 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5647 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5649 rtx temp = expand_binop (GET_MODE (value), binoptab,
5650 XEXP (XEXP (value, 0), 0), op2,
5651 subtarget, 0, OPTAB_LIB_WIDEN);
5652 return expand_binop (GET_MODE (value), binoptab, temp,
5653 force_operand (XEXP (XEXP (value, 0), 1), 0),
5654 target, 0, OPTAB_LIB_WIDEN);
5657 tmp = force_operand (XEXP (value, 0), subtarget);
5658 return expand_binop (GET_MODE (value), binoptab, tmp,
5659 force_operand (op2, NULL_RTX),
5660 target, 0, OPTAB_LIB_WIDEN);
5661 /* We give UNSIGNEDP = 0 to expand_binop
5662 because the only operations we are expanding here are signed ones. */
5664 return value;
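/* A minimal sketch of typical use, assuming an address of the form
   BASE + INDEX * 4 that must be reduced to a single operand; BASE and
   INDEX are hypothetical pseudo registers and the block is kept under
   #if 0 since it is purely illustrative.  */
#if 0
static rtx
force_operand_sketch (base, index)
     rtx base, index;
{
  /* Build (plus base (mult index 4)) and let force_operand emit the
     multiply and add; the result is a pseudo register holding the sum.  */
  rtx addr = gen_rtx_PLUS (Pmode, base,
                           gen_rtx_MULT (Pmode, index, GEN_INT (4)));
  return force_operand (addr, NULL_RTX);
}
#endif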
5667 /* Subroutine of expand_expr: return nonzero iff there is no way that
5668 EXP can reference X, which is being modified. TOP_P is nonzero if this
5669 call is going to be used to determine whether we need a temporary
5670 for EXP, as opposed to a recursive call to this function.
5672 It is always safe for this routine to return zero since it merely
5673 searches for optimization opportunities. */
5675 int
5676 safe_from_p (x, exp, top_p)
5677 rtx x;
5678 tree exp;
5679 int top_p;
5681 rtx exp_rtl = 0;
5682 int i, nops;
5683 static tree save_expr_list;
5685 if (x == 0
5686 /* If EXP has varying size, we MUST use a target since we currently
5687 have no way of allocating temporaries of variable size
5688 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5689 So we assume here that something at a higher level has prevented a
5690 clash. This is somewhat bogus, but the best we can do. Only
5691 do this when X is BLKmode and when we are at the top level. */
5692 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5693 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5694 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5695 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5696 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5697 != INTEGER_CST)
5698 && GET_MODE (x) == BLKmode)
5699 /* If X is in the outgoing argument area, it is always safe. */
5700 || (GET_CODE (x) == MEM
5701 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5702 || (GET_CODE (XEXP (x, 0)) == PLUS
5703 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5704 return 1;
5706 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5707 find the underlying pseudo. */
5708 if (GET_CODE (x) == SUBREG)
5710 x = SUBREG_REG (x);
5711 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5712 return 0;
5715 /* A SAVE_EXPR might appear many times in the expression passed to the
5716 top-level safe_from_p call, and if it has a complex subexpression,
5717 examining it multiple times could result in a combinatorial explosion.
5718 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5719 with optimization took about 28 minutes to compile -- even though it was
5720 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5721 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5722 we have processed. Note that the only test of top_p was above. */
5724 if (top_p)
5726 int rtn;
5727 tree t;
5729 save_expr_list = 0;
5731 rtn = safe_from_p (x, exp, 0);
5733 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5734 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5736 return rtn;
5739 /* Now look at our tree code and possibly recurse. */
5740 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5742 case 'd':
5743 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5744 break;
5746 case 'c':
5747 return 1;
5749 case 'x':
5750 if (TREE_CODE (exp) == TREE_LIST)
5751 return ((TREE_VALUE (exp) == 0
5752 || safe_from_p (x, TREE_VALUE (exp), 0))
5753 && (TREE_CHAIN (exp) == 0
5754 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5755 else if (TREE_CODE (exp) == ERROR_MARK)
5756 return 1; /* An already-visited SAVE_EXPR? */
5757 else
5758 return 0;
5760 case '1':
5761 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5763 case '2':
5764 case '<':
5765 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5766 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5768 case 'e':
5769 case 'r':
5770 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5771 the expression. If it is set, we conflict iff we are that rtx or
5772 both are in memory. Otherwise, we check all operands of the
5773 expression recursively. */
5775 switch (TREE_CODE (exp))
5777 case ADDR_EXPR:
5778 return (staticp (TREE_OPERAND (exp, 0))
5779 || TREE_STATIC (exp)
5780 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5782 case INDIRECT_REF:
5783 if (GET_CODE (x) == MEM
5784 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5785 get_alias_set (exp)))
5786 return 0;
5787 break;
5789 case CALL_EXPR:
5790 /* Assume that the call will clobber all hard registers and
5791 all of memory. */
5792 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5793 || GET_CODE (x) == MEM)
5794 return 0;
5795 break;
5797 case RTL_EXPR:
5798 /* If a sequence exists, we would have to scan every instruction
5799 in the sequence to see if it was safe. This is probably not
5800 worthwhile. */
5801 if (RTL_EXPR_SEQUENCE (exp))
5802 return 0;
5804 exp_rtl = RTL_EXPR_RTL (exp);
5805 break;
5807 case WITH_CLEANUP_EXPR:
5808 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5809 break;
5811 case CLEANUP_POINT_EXPR:
5812 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5814 case SAVE_EXPR:
5815 exp_rtl = SAVE_EXPR_RTL (exp);
5816 if (exp_rtl)
5817 break;
5819 /* If we've already scanned this, don't do it again. Otherwise,
5820 show we've scanned it and record it so the flag can be cleared
5821 when we are done. */
5822 if (TREE_PRIVATE (exp))
5823 return 1;
5825 TREE_PRIVATE (exp) = 1;
5826 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5828 TREE_PRIVATE (exp) = 0;
5829 return 0;
5832 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5833 return 1;
5835 case BIND_EXPR:
5836 /* The only operand we look at is operand 1. The rest aren't
5837 part of the expression. */
5838 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5840 case METHOD_CALL_EXPR:
5841 /* This takes an rtx argument, but shouldn't appear here. */
5842 abort ();
5844 default:
5845 break;
5848 /* If we have an rtx, we do not need to scan our operands. */
5849 if (exp_rtl)
5850 break;
5852 nops = first_rtl_op (TREE_CODE (exp));
5853 for (i = 0; i < nops; i++)
5854 if (TREE_OPERAND (exp, i) != 0
5855 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5856 return 0;
5858 /* If this is a language-specific tree code, it may require
5859 special handling. */
5860 if ((unsigned int) TREE_CODE (exp)
5861 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5862 && lang_safe_from_p
5863 && !(*lang_safe_from_p) (x, exp))
5864 return 0;
5867 /* If we have an rtl, find any enclosed object. Then see if we conflict
5868 with it. */
5869 if (exp_rtl)
5871 if (GET_CODE (exp_rtl) == SUBREG)
5873 exp_rtl = SUBREG_REG (exp_rtl);
5874 if (GET_CODE (exp_rtl) == REG
5875 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5876 return 0;
5879 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
5880 both are memory and they conflict. */
5881 return ! (rtx_equal_p (x, exp_rtl)
5882 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5883 && true_dependence (exp_rtl, GET_MODE (x), x,
5884 rtx_addr_varies_p)));
5887 /* If we reach here, it is safe. */
5888 return 1;
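/* A minimal sketch of the guard callers use before expanding EXP
   directly into TARGET, assuming a scalar mode TMODE and that EXP might
   reference TARGET; the helper itself is hypothetical and the block is
   kept under #if 0 since it is purely illustrative.  */
#if 0
static rtx
safe_target_sketch (exp, target, tmode)
     tree exp;
     rtx target;
     enum machine_mode tmode;
{
  /* If EXP could reference TARGET, writing partial results into TARGET
     would clobber one of its own inputs, so use a fresh pseudo instead.  */
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = gen_reg_rtx (tmode);

  return expand_expr (exp, target, tmode, EXPAND_NORMAL);
}
#endif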
5891 /* Subroutine of expand_expr: return rtx if EXP is a
5892 variable or parameter; else return 0. */
5894 static rtx
5895 var_rtx (exp)
5896 tree exp;
5898 STRIP_NOPS (exp);
5899 switch (TREE_CODE (exp))
5901 case PARM_DECL:
5902 case VAR_DECL:
5903 return DECL_RTL (exp);
5904 default:
5905 return 0;
5909 #ifdef MAX_INTEGER_COMPUTATION_MODE
5911 void
5912 check_max_integer_computation_mode (exp)
5913 tree exp;
5915 enum tree_code code;
5916 enum machine_mode mode;
5918 /* Strip any NOPs that don't change the mode. */
5919 STRIP_NOPS (exp);
5920 code = TREE_CODE (exp);
5922 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5923 if (code == NOP_EXPR
5924 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5925 return;
5927 /* First check the type of the overall operation. We need only look at
5928 unary, binary and relational operations. */
5929 if (TREE_CODE_CLASS (code) == '1'
5930 || TREE_CODE_CLASS (code) == '2'
5931 || TREE_CODE_CLASS (code) == '<')
5933 mode = TYPE_MODE (TREE_TYPE (exp));
5934 if (GET_MODE_CLASS (mode) == MODE_INT
5935 && mode > MAX_INTEGER_COMPUTATION_MODE)
5936 internal_error ("unsupported wide integer operation");
5939 /* Check operand of a unary op. */
5940 if (TREE_CODE_CLASS (code) == '1')
5942 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5943 if (GET_MODE_CLASS (mode) == MODE_INT
5944 && mode > MAX_INTEGER_COMPUTATION_MODE)
5945 internal_error ("unsupported wide integer operation");
5948 /* Check operands of a binary/comparison op. */
5949 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5951 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5952 if (GET_MODE_CLASS (mode) == MODE_INT
5953 && mode > MAX_INTEGER_COMPUTATION_MODE)
5954 internal_error ("unsupported wide integer operation");
5956 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5957 if (GET_MODE_CLASS (mode) == MODE_INT
5958 && mode > MAX_INTEGER_COMPUTATION_MODE)
5959 internal_error ("unsupported wide integer operation");
5962 #endif
5964 /* Return the highest power of two that EXP is known to be a multiple of.
5965 This is used in updating alignment of MEMs in array references. */
5967 static HOST_WIDE_INT
5968 highest_pow2_factor (exp)
5969 tree exp;
5971 HOST_WIDE_INT c0, c1;
5973 switch (TREE_CODE (exp))
5975 case INTEGER_CST:
5976 /* If the integer is expressible in a HOST_WIDE_INT, we can find
5977 the lowest bit that's a one. If the result is zero or negative,
5978 pessimize by returning 1. This is overly-conservative, but such
5979 things should not happen in the offset expressions that we are
5980 called with. */
5981 if (host_integerp (exp, 0))
5983 c0 = tree_low_cst (exp, 0);
5984 return c0 >= 0 ? c0 & -c0 : 1;
5986 break;
5988 case PLUS_EXPR: case MINUS_EXPR:
5989 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5990 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5991 return MIN (c0, c1);
5993 case MULT_EXPR:
5994 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5995 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5996 return c0 * c1;
5998 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5999 case CEIL_DIV_EXPR:
6000 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6001 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6002 return c0 / c1;
6004 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6005 case COMPOUND_EXPR: case SAVE_EXPR:
6006 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6008 case COND_EXPR:
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6010 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6011 return MIN (c0, c1);
6013 default:
6014 break;
6017 return 1;
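/* A worked example: for an offset tree of the form I * 12 + 8, the
   MULT_EXPR case gives 1 * 4 = 4 (nothing is known about I, and 4 is the
   largest power of two dividing 12), and the PLUS_EXPR case then returns
   MIN (4, 8) = 4.  The sketch below mirrors how the result feeds
   offset_address later in this file; OP0 and OFFSET_RTX are hypothetical
   and the block is kept under #if 0 since it is purely illustrative.  */
#if 0
static rtx
highest_pow2_factor_sketch (op0, offset, offset_rtx)
     rtx op0;
     tree offset;
     rtx offset_rtx;
{
  /* The known power-of-two factor of OFFSET bounds the alignment that
     may be assumed for the resulting address.  */
  return offset_address (op0, offset_rtx, highest_pow2_factor (offset));
}
#endif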
6020 /* Return an object on the placeholder list that matches EXP, a
6021 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6022 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6023 tree.def. If no such object is found, abort. If PLIST is nonzero, it
6024 points to a variable holding the place in the placeholder list at which
6025 to start the search (zero means the start of the list); on return it is
6026 updated to point to the list entry where the object was found. */
6028 tree
6029 find_placeholder (exp, plist)
6030 tree exp;
6031 tree *plist;
6033 tree type = TREE_TYPE (exp);
6034 tree placeholder_expr;
6036 for (placeholder_expr
6037 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6038 placeholder_expr != 0;
6039 placeholder_expr = TREE_CHAIN (placeholder_expr))
6041 tree need_type = TYPE_MAIN_VARIANT (type);
6042 tree elt;
6044 /* Find the outermost reference that is of the type we want. If none,
6045 see if any object has a type that is a pointer to the type we
6046 want. */
6047 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6048 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6049 || TREE_CODE (elt) == COND_EXPR)
6050 ? TREE_OPERAND (elt, 1)
6051 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6052 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6053 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6054 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6055 ? TREE_OPERAND (elt, 0) : 0))
6056 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6058 if (plist)
6059 *plist = placeholder_expr;
6060 return elt;
6063 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6064 elt
6065 = ((TREE_CODE (elt) == COMPOUND_EXPR
6066 || TREE_CODE (elt) == COND_EXPR)
6067 ? TREE_OPERAND (elt, 1)
6068 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6069 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6070 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6071 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6072 ? TREE_OPERAND (elt, 0) : 0))
6073 if (POINTER_TYPE_P (TREE_TYPE (elt))
6074 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6075 == need_type))
6077 if (plist)
6078 *plist = placeholder_expr;
6079 return build1 (INDIRECT_REF, need_type, elt);
6083 abort ();
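/* A minimal sketch of the protocol this function relies on, modeled on
   the WITH_RECORD_EXPR and PLACEHOLDER_EXPR cases of expand_expr below:
   the record object is pushed on placeholder_list while its operand is
   expanded, and any PLACEHOLDER_EXPR inside then resolves to it.  The
   tree RECORD is hypothetical and the block is kept under #if 0 since it
   is purely illustrative.  */
#if 0
static tree
find_placeholder_sketch (placeholder, record)
     tree placeholder, record;
{
  tree plist = 0;
  tree found;

  /* Make RECORD visible to the lookup, exactly as WITH_RECORD_EXPR
     expansion does, then resolve the placeholder against it.  */
  placeholder_list = tree_cons (record, NULL_TREE, placeholder_list);
  found = find_placeholder (placeholder, &plist);
  placeholder_list = TREE_CHAIN (placeholder_list);

  return found;
}
#endif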
6086 /* expand_expr: generate code for computing expression EXP.
6087 An rtx for the computed value is returned. The value is never null.
6088 In the case of a void EXP, const0_rtx is returned.
6090 The value may be stored in TARGET if TARGET is nonzero.
6091 TARGET is just a suggestion; callers must assume that
6092 the rtx returned may not be the same as TARGET.
6094 If TARGET is CONST0_RTX, it means that the value will be ignored.
6096 If TMODE is not VOIDmode, it suggests generating the
6097 result in mode TMODE. But this is done only when convenient.
6098 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6099 TMODE is just a suggestion; callers must assume that
6100 the rtx returned may not have mode TMODE.
6102 Note that TARGET may have neither TMODE nor MODE. In that case, it
6103 probably will not be used.
6105 If MODIFIER is EXPAND_SUM then when EXP is an addition
6106 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6107 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6108 products as above, or REG or MEM, or constant.
6109 Ordinarily in such cases we would output mul or add instructions
6110 and then return a pseudo reg containing the sum.
6112 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6113 it also marks a label as absolutely required (it can't be dead).
6114 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6115 This is used for outputting expressions used in initializers.
6117 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6118 with a constant address even if that address is not normally legitimate.
6119 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
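/* A minimal sketch of a call that honors the contract above, assuming a
   hypothetical tree EXP whose value is wanted in SImode: both TARGET and
   TMODE are only suggestions, so the returned rtx must be checked and
   moved if a particular register is required.  Kept under #if 0 since it
   is purely illustrative.  */
#if 0
static rtx
expand_expr_sketch (exp, target)
     tree exp;
     rtx target;
{
  rtx val = expand_expr (exp, target, SImode, EXPAND_NORMAL);

  /* The result need not be TARGET, nor even have mode SImode.  */
  if (target != 0 && val != target && GET_MODE (val) == GET_MODE (target))
    emit_move_insn (target, val);

  return val;
}
#endif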
6121 rtx
6122 expand_expr (exp, target, tmode, modifier)
6123 tree exp;
6124 rtx target;
6125 enum machine_mode tmode;
6126 enum expand_modifier modifier;
6128 rtx op0, op1, temp;
6129 tree type = TREE_TYPE (exp);
6130 int unsignedp = TREE_UNSIGNED (type);
6131 enum machine_mode mode;
6132 enum tree_code code = TREE_CODE (exp);
6133 optab this_optab;
6134 rtx subtarget, original_target;
6135 int ignore;
6136 tree context;
6137 /* Used by check-memory-usage to make modifier read only. */
6138 enum expand_modifier ro_modifier;
6140 /* Handle ERROR_MARK before anybody tries to access its type. */
6141 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6143 op0 = CONST0_RTX (tmode);
6144 if (op0 != 0)
6145 return op0;
6146 return const0_rtx;
6149 mode = TYPE_MODE (type);
6150 /* Use subtarget as the target for operand 0 of a binary operation. */
6151 subtarget = get_subtarget (target);
6152 original_target = target;
6153 ignore = (target == const0_rtx
6154 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6155 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6156 || code == COND_EXPR)
6157 && TREE_CODE (type) == VOID_TYPE));
6159 /* Make a read-only version of the modifier. */
6160 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6161 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6162 ro_modifier = modifier;
6163 else
6164 ro_modifier = EXPAND_NORMAL;
6166 /* If we are going to ignore this result, we need only do something
6167 if there is a side-effect somewhere in the expression. If there
6168 is, short-circuit the most common cases here. Note that we must
6169 not call expand_expr with anything but const0_rtx in case this
6170 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6172 if (ignore)
6174 if (! TREE_SIDE_EFFECTS (exp))
6175 return const0_rtx;
6177 /* Ensure we reference a volatile object even if value is ignored, but
6178 don't do this if all we are doing is taking its address. */
6179 if (TREE_THIS_VOLATILE (exp)
6180 && TREE_CODE (exp) != FUNCTION_DECL
6181 && mode != VOIDmode && mode != BLKmode
6182 && modifier != EXPAND_CONST_ADDRESS)
6184 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6185 if (GET_CODE (temp) == MEM)
6186 temp = copy_to_reg (temp);
6187 return const0_rtx;
6190 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6191 || code == INDIRECT_REF || code == BUFFER_REF)
6192 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6193 VOIDmode, ro_modifier);
6194 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6195 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6197 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6198 ro_modifier);
6199 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6200 ro_modifier);
6201 return const0_rtx;
6203 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6204 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6205 /* If the second operand has no side effects, just evaluate
6206 the first. */
6207 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6208 VOIDmode, ro_modifier);
6209 else if (code == BIT_FIELD_REF)
6211 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6212 ro_modifier);
6213 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6214 ro_modifier);
6215 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6216 ro_modifier);
6217 return const0_rtx;
6220 target = 0;
6223 #ifdef MAX_INTEGER_COMPUTATION_MODE
6224 /* Only check stuff here if the mode we want is different from the mode
6225 of the expression; if it's the same, check_max_integer_computation_mode
6226 will handle it. Do we really need to check this stuff at all? */
6228 if (target
6229 && GET_MODE (target) != mode
6230 && TREE_CODE (exp) != INTEGER_CST
6231 && TREE_CODE (exp) != PARM_DECL
6232 && TREE_CODE (exp) != ARRAY_REF
6233 && TREE_CODE (exp) != ARRAY_RANGE_REF
6234 && TREE_CODE (exp) != COMPONENT_REF
6235 && TREE_CODE (exp) != BIT_FIELD_REF
6236 && TREE_CODE (exp) != INDIRECT_REF
6237 && TREE_CODE (exp) != CALL_EXPR
6238 && TREE_CODE (exp) != VAR_DECL
6239 && TREE_CODE (exp) != RTL_EXPR)
6241 enum machine_mode mode = GET_MODE (target);
6243 if (GET_MODE_CLASS (mode) == MODE_INT
6244 && mode > MAX_INTEGER_COMPUTATION_MODE)
6245 internal_error ("unsupported wide integer operation");
6248 if (tmode != mode
6249 && TREE_CODE (exp) != INTEGER_CST
6250 && TREE_CODE (exp) != PARM_DECL
6251 && TREE_CODE (exp) != ARRAY_REF
6252 && TREE_CODE (exp) != ARRAY_RANGE_REF
6253 && TREE_CODE (exp) != COMPONENT_REF
6254 && TREE_CODE (exp) != BIT_FIELD_REF
6255 && TREE_CODE (exp) != INDIRECT_REF
6256 && TREE_CODE (exp) != VAR_DECL
6257 && TREE_CODE (exp) != CALL_EXPR
6258 && TREE_CODE (exp) != RTL_EXPR
6259 && GET_MODE_CLASS (tmode) == MODE_INT
6260 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6261 internal_error ("unsupported wide integer operation");
6263 check_max_integer_computation_mode (exp);
6264 #endif
6266 /* If we will do cse, generate all results into pseudo registers
6267 since 1) that allows cse to find more things
6268 and 2) otherwise cse could produce an insn the machine
6269 cannot support. */
6271 if (! cse_not_expected && mode != BLKmode && target
6272 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6273 target = subtarget;
6275 switch (code)
6277 case LABEL_DECL:
6279 tree function = decl_function_context (exp);
6280 /* Handle using a label in a containing function. */
6281 if (function != current_function_decl
6282 && function != inline_function_decl && function != 0)
6284 struct function *p = find_function_data (function);
6285 p->expr->x_forced_labels
6286 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6287 p->expr->x_forced_labels);
6289 else
6291 if (modifier == EXPAND_INITIALIZER)
6292 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6293 label_rtx (exp),
6294 forced_labels);
6297 temp = gen_rtx_MEM (FUNCTION_MODE,
6298 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6299 if (function != current_function_decl
6300 && function != inline_function_decl && function != 0)
6301 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6302 return temp;
6305 case PARM_DECL:
6306 if (DECL_RTL (exp) == 0)
6308 error_with_decl (exp, "prior parameter's size depends on `%s'");
6309 return CONST0_RTX (mode);
6312 /* ... fall through ... */
6314 case VAR_DECL:
6315 /* If a static var's type was incomplete when the decl was written,
6316 but the type is complete now, lay out the decl now. */
6317 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6318 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6320 layout_decl (exp, 0);
6321 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6324 /* Although static-storage variables start off initialized, according to
6325 ANSI C, a memcpy could overwrite them with uninitialized values. So
6326 we check them too. This also lets us check for read-only variables
6327 accessed via a non-const declaration, in case it won't be detected
6328 any other way (e.g., in an embedded system or OS kernel without
6329 memory protection).
6331 Aggregates are not checked here; they're handled elsewhere. */
6332 if (cfun && current_function_check_memory_usage
6333 && code == VAR_DECL
6334 && GET_CODE (DECL_RTL (exp)) == MEM
6335 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6337 enum memory_use_mode memory_usage;
6338 memory_usage = get_memory_usage_from_modifier (modifier);
6340 in_check_memory_usage = 1;
6341 if (memory_usage != MEMORY_USE_DONT)
6342 emit_library_call (chkr_check_addr_libfunc,
6343 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6344 XEXP (DECL_RTL (exp), 0), Pmode,
6345 GEN_INT (int_size_in_bytes (type)),
6346 TYPE_MODE (sizetype),
6347 GEN_INT (memory_usage),
6348 TYPE_MODE (integer_type_node));
6349 in_check_memory_usage = 0;
6352 /* ... fall through ... */
6354 case FUNCTION_DECL:
6355 case RESULT_DECL:
6356 if (DECL_RTL (exp) == 0)
6357 abort ();
6359 /* Ensure the variable is marked as used even if it doesn't go through
6360 a parser. If it hasn't been used yet, write out an external
6361 definition. */
6362 if (! TREE_USED (exp))
6364 assemble_external (exp);
6365 TREE_USED (exp) = 1;
6368 /* Show we haven't gotten RTL for this yet. */
6369 temp = 0;
6371 /* Handle variables inherited from containing functions. */
6372 context = decl_function_context (exp);
6374 /* We treat inline_function_decl as an alias for the current function
6375 because that is the inline function whose vars, types, etc.
6376 are being merged into the current function.
6377 See expand_inline_function. */
6379 if (context != 0 && context != current_function_decl
6380 && context != inline_function_decl
6381 /* If var is static, we don't need a static chain to access it. */
6382 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6383 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6385 rtx addr;
6387 /* Mark as non-local and addressable. */
6388 DECL_NONLOCAL (exp) = 1;
6389 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6390 abort ();
6391 mark_addressable (exp);
6392 if (GET_CODE (DECL_RTL (exp)) != MEM)
6393 abort ();
6394 addr = XEXP (DECL_RTL (exp), 0);
6395 if (GET_CODE (addr) == MEM)
6396 addr
6397 = replace_equiv_address (addr,
6398 fix_lexical_addr (XEXP (addr, 0), exp));
6399 else
6400 addr = fix_lexical_addr (addr, exp);
6402 temp = replace_equiv_address (DECL_RTL (exp), addr);
6405 /* This is the case of an array whose size is to be determined
6406 from its initializer, while the initializer is still being parsed.
6407 See expand_decl. */
6409 else if (GET_CODE (DECL_RTL (exp)) == MEM
6410 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6411 temp = validize_mem (DECL_RTL (exp));
6413 /* If DECL_RTL is memory, we are in the normal case; if either
6414 the address is not valid, or it is not a register and -fforce-addr
6415 is specified, get the address into a register. */
6417 else if (GET_CODE (DECL_RTL (exp)) == MEM
6418 && modifier != EXPAND_CONST_ADDRESS
6419 && modifier != EXPAND_SUM
6420 && modifier != EXPAND_INITIALIZER
6421 && (! memory_address_p (DECL_MODE (exp),
6422 XEXP (DECL_RTL (exp), 0))
6423 || (flag_force_addr
6424 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6425 temp = replace_equiv_address (DECL_RTL (exp),
6426 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6428 /* If we got something, return it. But first, set the alignment
6429 if the address is a register. */
6430 if (temp != 0)
6432 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6433 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6435 return temp;
6438 /* If the mode of DECL_RTL does not match that of the decl, it
6439 must be a promoted value. We return a SUBREG of the wanted mode,
6440 but mark it so that we know that it was already extended. */
6442 if (GET_CODE (DECL_RTL (exp)) == REG
6443 && GET_MODE (DECL_RTL (exp)) != mode)
6445 /* Get the signedness used for this variable. Ensure we get the
6446 same mode we got when the variable was declared. */
6447 if (GET_MODE (DECL_RTL (exp))
6448 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6449 abort ();
6451 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6452 SUBREG_PROMOTED_VAR_P (temp) = 1;
6453 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6454 return temp;
6457 return DECL_RTL (exp);
6459 case INTEGER_CST:
6460 return immed_double_const (TREE_INT_CST_LOW (exp),
6461 TREE_INT_CST_HIGH (exp), mode);
6463 case CONST_DECL:
6464 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6465 EXPAND_MEMORY_USE_BAD);
6467 case REAL_CST:
6468 /* If optimized, generate immediate CONST_DOUBLE
6469 which will be turned into memory by reload if necessary.
6471 We used to force a register so that loop.c could see it. But
6472 this does not allow gen_* patterns to perform optimizations with
6473 the constants. It also produces two insns in cases like "x = 1.0;".
6474 On most machines, floating-point constants are not permitted in
6475 many insns, so we'd end up copying it to a register in any case.
6477 Now, we do the copying in expand_binop, if appropriate. */
6478 return immed_real_const (exp);
6480 case COMPLEX_CST:
6481 case STRING_CST:
6482 if (! TREE_CST_RTL (exp))
6483 output_constant_def (exp, 1);
6485 /* TREE_CST_RTL probably contains a constant address.
6486 On RISC machines where a constant address isn't valid,
6487 make some insns to get that address into a register. */
6488 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6489 && modifier != EXPAND_CONST_ADDRESS
6490 && modifier != EXPAND_INITIALIZER
6491 && modifier != EXPAND_SUM
6492 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6493 || (flag_force_addr
6494 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6495 return replace_equiv_address (TREE_CST_RTL (exp),
6496 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6497 return TREE_CST_RTL (exp);
6499 case EXPR_WITH_FILE_LOCATION:
6501 rtx to_return;
6502 const char *saved_input_filename = input_filename;
6503 int saved_lineno = lineno;
6504 input_filename = EXPR_WFL_FILENAME (exp);
6505 lineno = EXPR_WFL_LINENO (exp);
6506 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6507 emit_line_note (input_filename, lineno);
6508 /* Possibly avoid switching back and forth here. */
6509 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6510 input_filename = saved_input_filename;
6511 lineno = saved_lineno;
6512 return to_return;
6515 case SAVE_EXPR:
6516 context = decl_function_context (exp);
6518 /* If this SAVE_EXPR was at global context, assume we are an
6519 initialization function and move it into our context. */
6520 if (context == 0)
6521 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6523 /* We treat inline_function_decl as an alias for the current function
6524 because that is the inline function whose vars, types, etc.
6525 are being merged into the current function.
6526 See expand_inline_function. */
6527 if (context == current_function_decl || context == inline_function_decl)
6528 context = 0;
6530 /* If this is non-local, handle it. */
6531 if (context)
6533 /* The following call just exists to abort if the context is
6534 not of a containing function. */
6535 find_function_data (context);
6537 temp = SAVE_EXPR_RTL (exp);
6538 if (temp && GET_CODE (temp) == REG)
6540 put_var_into_stack (exp);
6541 temp = SAVE_EXPR_RTL (exp);
6543 if (temp == 0 || GET_CODE (temp) != MEM)
6544 abort ();
6545 return
6546 replace_equiv_address (temp,
6547 fix_lexical_addr (XEXP (temp, 0), exp));
6549 if (SAVE_EXPR_RTL (exp) == 0)
6551 if (mode == VOIDmode)
6552 temp = const0_rtx;
6553 else
6554 temp = assign_temp (build_qualified_type (type,
6555 (TYPE_QUALS (type)
6556 | TYPE_QUAL_CONST)),
6557 3, 0, 0);
6559 SAVE_EXPR_RTL (exp) = temp;
6560 if (!optimize && GET_CODE (temp) == REG)
6561 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6562 save_expr_regs);
6564 /* If the mode of TEMP does not match that of the expression, it
6565 must be a promoted value. We pass store_expr a SUBREG of the
6566 wanted mode but mark it so that we know that it was already
6567 extended. Note that `unsignedp' was modified above in
6568 this case. */
6570 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6572 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6573 SUBREG_PROMOTED_VAR_P (temp) = 1;
6574 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6577 if (temp == const0_rtx)
6578 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6579 EXPAND_MEMORY_USE_BAD);
6580 else
6581 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6583 TREE_USED (exp) = 1;
6586 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6587 must be a promoted value. We return a SUBREG of the wanted mode,
6588 but mark it so that we know that it was already extended. */
6590 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6591 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6593 /* Compute the signedness and make the proper SUBREG. */
6594 promote_mode (type, mode, &unsignedp, 0);
6595 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6596 SUBREG_PROMOTED_VAR_P (temp) = 1;
6597 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6598 return temp;
6601 return SAVE_EXPR_RTL (exp);
6603 case UNSAVE_EXPR:
6605 rtx temp;
6606 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6607 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6608 return temp;
6611 case PLACEHOLDER_EXPR:
6613 tree old_list = placeholder_list;
6614 tree placeholder_expr = 0;
6616 exp = find_placeholder (exp, &placeholder_expr);
6617 placeholder_list = TREE_CHAIN (placeholder_expr);
6618 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6619 placeholder_list = old_list;
6620 return temp;
6623 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6624 abort ();
6626 case WITH_RECORD_EXPR:
6627 /* Put the object on the placeholder list, expand our first operand,
6628 and pop the list. */
6629 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6630 placeholder_list);
6631 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6632 tmode, ro_modifier);
6633 placeholder_list = TREE_CHAIN (placeholder_list);
6634 return target;
6636 case GOTO_EXPR:
6637 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6638 expand_goto (TREE_OPERAND (exp, 0));
6639 else
6640 expand_computed_goto (TREE_OPERAND (exp, 0));
6641 return const0_rtx;
6643 case EXIT_EXPR:
6644 expand_exit_loop_if_false (NULL,
6645 invert_truthvalue (TREE_OPERAND (exp, 0)));
6646 return const0_rtx;
6648 case LABELED_BLOCK_EXPR:
6649 if (LABELED_BLOCK_BODY (exp))
6650 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6651 /* Should perhaps use expand_label, but this is simpler and safer. */
6652 do_pending_stack_adjust ();
6653 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6654 return const0_rtx;
6656 case EXIT_BLOCK_EXPR:
6657 if (EXIT_BLOCK_RETURN (exp))
6658 sorry ("returned value in block_exit_expr");
6659 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6660 return const0_rtx;
6662 case LOOP_EXPR:
6663 push_temp_slots ();
6664 expand_start_loop (1);
6665 expand_expr_stmt (TREE_OPERAND (exp, 0));
6666 expand_end_loop ();
6667 pop_temp_slots ();
6669 return const0_rtx;
6671 case BIND_EXPR:
6673 tree vars = TREE_OPERAND (exp, 0);
6674 int vars_need_expansion = 0;
6676 /* Need to open a binding contour here because
6677 if there are any cleanups they must be contained here. */
6678 expand_start_bindings (2);
6680 /* Mark the corresponding BLOCK for output in its proper place. */
6681 if (TREE_OPERAND (exp, 2) != 0
6682 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6683 insert_block (TREE_OPERAND (exp, 2));
6685 /* If VARS have not yet been expanded, expand them now. */
6686 while (vars)
6688 if (!DECL_RTL_SET_P (vars))
6690 vars_need_expansion = 1;
6691 expand_decl (vars);
6693 expand_decl_init (vars);
6694 vars = TREE_CHAIN (vars);
6697 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6699 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6701 return temp;
6704 case RTL_EXPR:
6705 if (RTL_EXPR_SEQUENCE (exp))
6707 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6708 abort ();
6709 emit_insns (RTL_EXPR_SEQUENCE (exp));
6710 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6712 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6713 free_temps_for_rtl_expr (exp);
6714 return RTL_EXPR_RTL (exp);
6716 case CONSTRUCTOR:
6717 /* If we don't need the result, just ensure we evaluate any
6718 subexpressions. */
6719 if (ignore)
6721 tree elt;
6722 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6723 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6724 EXPAND_MEMORY_USE_BAD);
6725 return const0_rtx;
6728 /* All elts simple constants => refer to a constant in memory. But
6729 if this is a non-BLKmode mode, let it store a field at a time
6730 since that should make a CONST_INT or CONST_DOUBLE when we
6731 fold. Likewise, if we have a target we can use, it is best to
6732 store directly into the target unless the type is large enough
6733 that memcpy will be used. If we are making an initializer and
6734 all operands are constant, put it in memory as well. */
6735 else if ((TREE_STATIC (exp)
6736 && ((mode == BLKmode
6737 && ! (target != 0 && safe_from_p (target, exp, 1)))
6738 || TREE_ADDRESSABLE (exp)
6739 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6740 && (! MOVE_BY_PIECES_P
6741 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6742 TYPE_ALIGN (type)))
6743 && ! mostly_zeros_p (exp))))
6744 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6746 rtx constructor = output_constant_def (exp, 1);
6748 if (modifier != EXPAND_CONST_ADDRESS
6749 && modifier != EXPAND_INITIALIZER
6750 && modifier != EXPAND_SUM)
6751 constructor = validize_mem (constructor);
6753 return constructor;
6755 else
6757 /* Handle calls that pass values in multiple non-contiguous
6758 locations. The Irix 6 ABI has examples of this. */
6759 if (target == 0 || ! safe_from_p (target, exp, 1)
6760 || GET_CODE (target) == PARALLEL)
6761 target
6762 = assign_temp (build_qualified_type (type,
6763 (TYPE_QUALS (type)
6764 | (TREE_READONLY (exp)
6765 * TYPE_QUAL_CONST))),
6766 TREE_ADDRESSABLE (exp), 1, 1);
6768 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6769 int_size_in_bytes (TREE_TYPE (exp)));
6770 return target;
6773 case INDIRECT_REF:
6775 tree exp1 = TREE_OPERAND (exp, 0);
6776 tree index;
6777 tree string = string_constant (exp1, &index);
6779 /* Try to optimize reads from const strings. */
6780 if (string
6781 && TREE_CODE (string) == STRING_CST
6782 && TREE_CODE (index) == INTEGER_CST
6783 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6784 && GET_MODE_CLASS (mode) == MODE_INT
6785 && GET_MODE_SIZE (mode) == 1
6786 && modifier != EXPAND_MEMORY_USE_WO)
6787 return
6788 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6790 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6791 op0 = memory_address (mode, op0);
6793 if (cfun && current_function_check_memory_usage
6794 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6796 enum memory_use_mode memory_usage;
6797 memory_usage = get_memory_usage_from_modifier (modifier);
6799 if (memory_usage != MEMORY_USE_DONT)
6801 in_check_memory_usage = 1;
6802 emit_library_call (chkr_check_addr_libfunc,
6803 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6804 Pmode, GEN_INT (int_size_in_bytes (type)),
6805 TYPE_MODE (sizetype),
6806 GEN_INT (memory_usage),
6807 TYPE_MODE (integer_type_node));
6808 in_check_memory_usage = 0;
6812 temp = gen_rtx_MEM (mode, op0);
6813 set_mem_attributes (temp, exp, 0);
6815 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6816 here, because, in C and C++, the fact that a location is accessed
6817 through a pointer to const does not mean that the value there can
6818 never change. Languages where it can never change should
6819 also set TREE_STATIC. */
6820 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6822 /* If we are writing to this object and its type is a record with
6823 readonly fields, we must mark it as readonly so it will
6824 conflict with readonly references to those fields. */
6825 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6826 RTX_UNCHANGING_P (temp) = 1;
6828 return temp;
6831 case ARRAY_REF:
6832 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6833 abort ();
6836 tree array = TREE_OPERAND (exp, 0);
6837 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6838 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6839 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6840 HOST_WIDE_INT i;
6842 /* Optimize the special-case of a zero lower bound.
6844 We convert the low_bound to sizetype to avoid some problems
6845 with constant folding. (E.g. suppose the lower bound is 1,
6846 and its mode is QI. Without the conversion, (ARRAY
6847 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6848 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6850 if (! integer_zerop (low_bound))
6851 index = size_diffop (index, convert (sizetype, low_bound));
6853 /* Fold an expression like: "foo"[2].
6854 This is not done in fold so it won't happen inside &.
6855 Don't fold if this is for wide characters since it's too
6856 difficult to do correctly and this is a very rare case. */
6858 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6859 && TREE_CODE (array) == STRING_CST
6860 && TREE_CODE (index) == INTEGER_CST
6861 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6862 && GET_MODE_CLASS (mode) == MODE_INT
6863 && GET_MODE_SIZE (mode) == 1)
6864 return
6865 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6867 /* If this is a constant index into a constant array,
6868 just get the value from the array. Handle both the cases when
6869 we have an explicit constructor and when our operand is a variable
6870 that was declared const. */
6872 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6873 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6874 && TREE_CODE (index) == INTEGER_CST
6875 && 0 > compare_tree_int (index,
6876 list_length (CONSTRUCTOR_ELTS
6877 (TREE_OPERAND (exp, 0)))))
6879 tree elem;
6881 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6882 i = TREE_INT_CST_LOW (index);
6883 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6884 ;
6886 if (elem)
6887 return expand_expr (fold (TREE_VALUE (elem)), target,
6888 tmode, ro_modifier);
6891 else if (optimize >= 1
6892 && modifier != EXPAND_CONST_ADDRESS
6893 && modifier != EXPAND_INITIALIZER
6894 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6895 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6896 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6898 if (TREE_CODE (index) == INTEGER_CST)
6900 tree init = DECL_INITIAL (array);
6902 if (TREE_CODE (init) == CONSTRUCTOR)
6904 tree elem;
6906 for (elem = CONSTRUCTOR_ELTS (init);
6907 (elem
6908 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6909 elem = TREE_CHAIN (elem))
6910 ;
6912 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6913 return expand_expr (fold (TREE_VALUE (elem)), target,
6914 tmode, ro_modifier);
6916 else if (TREE_CODE (init) == STRING_CST
6917 && 0 > compare_tree_int (index,
6918 TREE_STRING_LENGTH (init)))
6920 tree type = TREE_TYPE (TREE_TYPE (init));
6921 enum machine_mode mode = TYPE_MODE (type);
6923 if (GET_MODE_CLASS (mode) == MODE_INT
6924 && GET_MODE_SIZE (mode) == 1)
6925 return (GEN_INT
6926 (TREE_STRING_POINTER
6927 (init)[TREE_INT_CST_LOW (index)]));
6932 /* Fall through. */
6934 case COMPONENT_REF:
6935 case BIT_FIELD_REF:
6936 case ARRAY_RANGE_REF:
6937 /* If the operand is a CONSTRUCTOR, we can just extract the
6938 appropriate field if it is present. Don't do this if we have
6939 already written the data since we want to refer to that copy
6940 and varasm.c assumes that's what we'll do. */
6941 if (code == COMPONENT_REF
6942 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6943 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6945 tree elt;
6947 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6948 elt = TREE_CHAIN (elt))
6949 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6950 /* We can normally use the value of the field in the
6951 CONSTRUCTOR. However, if this is a bitfield in
6952 an integral mode that we can fit in a HOST_WIDE_INT,
6953 we must mask only the number of bits in the bitfield,
6954 since this is done implicitly by the constructor. If
6955 the bitfield does not meet either of those conditions,
6956 we can't do this optimization. */
6957 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6958 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6959 == MODE_INT)
6960 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6961 <= HOST_BITS_PER_WIDE_INT))))
6963 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6964 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6966 HOST_WIDE_INT bitsize
6967 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6969 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6971 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6972 op0 = expand_and (op0, op1, target);
6974 else
6976 enum machine_mode imode
6977 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6978 tree count
6979 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6980 0);
6982 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6983 target, 0);
6984 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6985 target, 0);
6989 return op0;
6994 enum machine_mode mode1;
6995 HOST_WIDE_INT bitsize, bitpos;
6996 tree offset;
6997 int volatilep = 0;
6998 unsigned int alignment;
6999 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7000 &mode1, &unsignedp, &volatilep,
7001 &alignment);
7002 rtx orig_op0;
7004 /* If we got back the original object, something is wrong. Perhaps
7005 we are evaluating an expression too early. In any event, don't
7006 infinitely recurse. */
7007 if (tem == exp)
7008 abort ();
7010 /* If TEM's type is a union of variable size, pass TARGET to the inner
7011 computation, since it will need a temporary and TARGET is known
7012 to suffice. This occurs in unchecked conversion in Ada. */
7014 orig_op0 = op0
7015 = expand_expr (tem,
7016 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7017 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7018 != INTEGER_CST)
7019 ? target : NULL_RTX),
7020 VOIDmode,
7021 (modifier == EXPAND_INITIALIZER
7022 || modifier == EXPAND_CONST_ADDRESS)
7023 ? modifier : EXPAND_NORMAL);
7025 /* If this is a constant, put it into a register if it is a
7026 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7027 if (CONSTANT_P (op0))
7029 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7030 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7031 && offset == 0)
7032 op0 = force_reg (mode, op0);
7033 else
7034 op0 = validize_mem (force_const_mem (mode, op0));
7037 if (offset != 0)
7039 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7041 /* If this object is in a register, put it into memory.
7042 This case can't occur in C, but can in Ada if we have
7043 unchecked conversion of an expression from a scalar type to
7044 an array or record type. */
7045 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7046 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7048 /* If the operand is a SAVE_EXPR, we can deal with this by
7049 forcing the SAVE_EXPR into memory. */
7050 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7052 put_var_into_stack (TREE_OPERAND (exp, 0));
7053 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7055 else
7057 tree nt
7058 = build_qualified_type (TREE_TYPE (tem),
7059 (TYPE_QUALS (TREE_TYPE (tem))
7060 | TYPE_QUAL_CONST));
7061 rtx memloc = assign_temp (nt, 1, 1, 1);
7063 mark_temp_addr_taken (memloc);
7064 emit_move_insn (memloc, op0);
7065 op0 = memloc;
7069 if (GET_CODE (op0) != MEM)
7070 abort ();
7072 if (GET_MODE (offset_rtx) != ptr_mode)
7073 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7075 #ifdef POINTERS_EXTEND_UNSIGNED
7076 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7077 #endif
7079 /* A constant address in OP0 can have VOIDmode; we must not try
7080 to call force_reg in that case, so avoid it. */
7081 if (GET_CODE (op0) == MEM
7082 && GET_MODE (op0) == BLKmode
7083 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7084 && bitsize != 0
7085 && (bitpos % bitsize) == 0
7086 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7087 && alignment == GET_MODE_ALIGNMENT (mode1))
7089 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7091 if (GET_CODE (XEXP (temp, 0)) == REG)
7092 op0 = temp;
7093 else
7094 op0 = (replace_equiv_address
7095 (op0,
7096 force_reg (GET_MODE (XEXP (temp, 0)),
7097 XEXP (temp, 0))));
7098 bitpos = 0;
7101 op0 = offset_address (op0, offset_rtx,
7102 highest_pow2_factor (offset));
7105 /* Don't forget about volatility even if this is a bitfield. */
7106 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7108 if (op0 == orig_op0)
7109 op0 = copy_rtx (op0);
7111 MEM_VOLATILE_P (op0) = 1;
7114 /* Check the access. */
7115 if (cfun != 0 && current_function_check_memory_usage
7116 && GET_CODE (op0) == MEM)
7118 enum memory_use_mode memory_usage;
7119 memory_usage = get_memory_usage_from_modifier (modifier);
7121 if (memory_usage != MEMORY_USE_DONT)
7123 rtx to;
7124 int size;
7126 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7127 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7129 /* Check the access right of the pointer. */
7130 in_check_memory_usage = 1;
7131 if (size > BITS_PER_UNIT)
7132 emit_library_call (chkr_check_addr_libfunc,
7133 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7134 Pmode, GEN_INT (size / BITS_PER_UNIT),
7135 TYPE_MODE (sizetype),
7136 GEN_INT (memory_usage),
7137 TYPE_MODE (integer_type_node));
7138 in_check_memory_usage = 0;
7142 /* In cases where an aligned union has an unaligned object
7143 as a field, we might be extracting a BLKmode value from
7144 an integer-mode (e.g., SImode) object. Handle this case
7145 by doing the extract into an object as wide as the field
7146 (which we know to be the width of a basic mode), then
7147 storing into memory, and changing the mode to BLKmode. */
7148 if (mode1 == VOIDmode
7149 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7150 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7151 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7152 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7153 && modifier != EXPAND_CONST_ADDRESS
7154 && modifier != EXPAND_INITIALIZER)
7155 /* If the field isn't aligned enough to fetch as a memref,
7156 fetch it as a bit field. */
7157 || (mode1 != BLKmode
7158 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7159 && ((TYPE_ALIGN (TREE_TYPE (tem))
7160 < GET_MODE_ALIGNMENT (mode))
7161 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7162 /* If the type and the field are a constant size and the
7163 size of the type isn't the same size as the bitfield,
7164 we must use bitfield operations. */
7165 || (bitsize >= 0
7166 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7167 == INTEGER_CST)
7168 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7169 bitsize))
7170 || (mode == BLKmode
7171 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7172 && (TYPE_ALIGN (type) > alignment
7173 || bitpos % TYPE_ALIGN (type) != 0)))
7175 enum machine_mode ext_mode = mode;
7177 if (ext_mode == BLKmode
7178 && ! (target != 0 && GET_CODE (op0) == MEM
7179 && GET_CODE (target) == MEM
7180 && bitpos % BITS_PER_UNIT == 0))
7181 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7183 if (ext_mode == BLKmode)
7185 /* In this case, BITPOS must start at a byte boundary and
7186 TARGET, if specified, must be a MEM. */
7187 if (GET_CODE (op0) != MEM
7188 || (target != 0 && GET_CODE (target) != MEM)
7189 || bitpos % BITS_PER_UNIT != 0)
7190 abort ();
7192 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7193 if (target == 0)
7194 target = assign_temp (type, 0, 1, 1);
7196 emit_block_move (target, op0,
7197 bitsize == -1 ? expr_size (exp)
7198 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7199 / BITS_PER_UNIT),
7200 BITS_PER_UNIT);
7202 return target;
7205 op0 = validize_mem (op0);
7207 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7208 mark_reg_pointer (XEXP (op0, 0), alignment);
7210 op0 = extract_bit_field (op0, bitsize, bitpos,
7211 unsignedp, target, ext_mode, ext_mode,
7212 alignment,
7213 int_size_in_bytes (TREE_TYPE (tem)));
7215 /* If the result is a record type and BITSIZE is narrower than
7216 the mode of OP0, an integral mode, and this is a big endian
7217 machine, we must put the field into the high-order bits. */
7218 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7219 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7220 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7221 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7222 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7223 - bitsize),
7224 op0, 1);
7226 if (mode == BLKmode)
7228 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7229 TYPE_QUAL_CONST);
7230 rtx new = assign_temp (nt, 0, 1, 1);
7232 emit_move_insn (new, op0);
7233 op0 = copy_rtx (new);
7234 PUT_MODE (op0, BLKmode);
7237 return op0;
7240 /* If the result is BLKmode, use that to access the object
7241 now as well. */
7242 if (mode == BLKmode)
7243 mode1 = BLKmode;
7245 /* Get a reference to just this component. */
7246 if (modifier == EXPAND_CONST_ADDRESS
7247 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7248 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7249 else
7250 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7252 if (op0 == orig_op0)
7253 op0 = copy_rtx (op0);
7255 set_mem_attributes (op0, exp, 0);
7256 if (GET_CODE (XEXP (op0, 0)) == REG)
7257 mark_reg_pointer (XEXP (op0, 0), alignment);
7259 MEM_VOLATILE_P (op0) |= volatilep;
7260 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7261 || modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_INITIALIZER)
7263 return op0;
7264 else if (target == 0)
7265 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7267 convert_move (target, op0, unsignedp);
7268 return target;
7271 case VTABLE_REF:
7273 rtx insn, before = get_last_insn (), vtbl_ref;
7275 /* Evaluate the interior expression. */
7276 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7277 tmode, modifier);
7279 /* Get or create an instruction off which to hang a note. */
7280 if (REG_P (subtarget))
7282 target = subtarget;
7283 insn = get_last_insn ();
7284 if (insn == before)
7285 abort ();
7286 if (! INSN_P (insn))
7287 insn = prev_nonnote_insn (insn);
7289 else
7291 target = gen_reg_rtx (GET_MODE (subtarget));
7292 insn = emit_move_insn (target, subtarget);
7295 /* Collect the data for the note. */
7296 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7297 vtbl_ref = plus_constant (vtbl_ref,
7298 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7299 /* Discard the initial CONST that was added. */
7300 vtbl_ref = XEXP (vtbl_ref, 0);
7302 REG_NOTES (insn)
7303 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7305 return target;
7308 /* Intended for a reference to a buffer of a file-object in Pascal.
7309 But it's not certain that a special tree code will really be
7310 necessary for these. INDIRECT_REF might work for them. */
7311 case BUFFER_REF:
7312 abort ();
7314 case IN_EXPR:
7316 /* Pascal set IN expression.
7318 Algorithm:
7319 rlo = set_low - (set_low%bits_per_word);
7320 the_word = set [ (index - rlo)/bits_per_word ];
7321 bit_index = index % bits_per_word;
7322 bitmask = 1 << bit_index;
7323 return !!(the_word & bitmask); */
7325 tree set = TREE_OPERAND (exp, 0);
7326 tree index = TREE_OPERAND (exp, 1);
7327 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7328 tree set_type = TREE_TYPE (set);
7329 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7330 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7331 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7332 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7333 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7334 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7335 rtx setaddr = XEXP (setval, 0);
7336 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7337 rtx rlow;
7338 rtx diff, quo, rem, addr, bit, result;
7340 /* If domain is empty, answer is no. Likewise if index is constant
7341 and out of bounds. */
7342 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7343 && TREE_CODE (set_low_bound) == INTEGER_CST
7344 && tree_int_cst_lt (set_high_bound, set_low_bound))
7345 || (TREE_CODE (index) == INTEGER_CST
7346 && TREE_CODE (set_low_bound) == INTEGER_CST
7347 && tree_int_cst_lt (index, set_low_bound))
7348 || (TREE_CODE (set_high_bound) == INTEGER_CST
7349 && TREE_CODE (index) == INTEGER_CST
7350 && tree_int_cst_lt (set_high_bound, index))))
7351 return const0_rtx;
7353 if (target == 0)
7354 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7356 /* If we get here, we have to generate the code for both cases
7357 (in range and out of range). */
7359 op0 = gen_label_rtx ();
7360 op1 = gen_label_rtx ();
7362 if (! (GET_CODE (index_val) == CONST_INT
7363 && GET_CODE (lo_r) == CONST_INT))
7365 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7366 GET_MODE (index_val), iunsignedp, 0, op1);
7369 if (! (GET_CODE (index_val) == CONST_INT
7370 && GET_CODE (hi_r) == CONST_INT))
7372 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7373 GET_MODE (index_val), iunsignedp, 0, op1);
7376 /* Calculate the element number of bit zero in the first word
7377 of the set. */
7378 if (GET_CODE (lo_r) == CONST_INT)
7379 rlow = GEN_INT (INTVAL (lo_r)
7380 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7381 else
7382 rlow = expand_binop (index_mode, and_optab, lo_r,
7383 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7384 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7386 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7387 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7389 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7390 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7391 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7392 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7394 addr = memory_address (byte_mode,
7395 expand_binop (index_mode, add_optab, diff,
7396 setaddr, NULL_RTX, iunsignedp,
7397 OPTAB_LIB_WIDEN));
7399 /* Extract the bit we want to examine. */
7400 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7401 gen_rtx_MEM (byte_mode, addr),
7402 make_tree (TREE_TYPE (index), rem),
7403 NULL_RTX, 1);
7404 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7405 GET_MODE (target) == byte_mode ? target : 0,
7406 1, OPTAB_LIB_WIDEN);
7408 if (result != target)
7409 convert_move (target, result, 1);
7411 /* Output the code to handle the out-of-range case. */
7412 emit_jump (op0);
7413 emit_label (op1);
7414 emit_move_insn (target, const0_rtx);
7415 emit_label (op0);
7416 return target;
7419 case WITH_CLEANUP_EXPR:
7420 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7422 WITH_CLEANUP_EXPR_RTL (exp)
7423 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7424 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7426 /* That's it for this cleanup. */
7427 TREE_OPERAND (exp, 1) = 0;
7429 return WITH_CLEANUP_EXPR_RTL (exp);
7431 case CLEANUP_POINT_EXPR:
7433 /* Start a new binding layer that will keep track of all cleanup
7434 actions to be performed. */
7435 expand_start_bindings (2);
7437 target_temp_slot_level = temp_slot_level;
7439 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7440 /* If we're going to use this value, load it up now. */
7441 if (! ignore)
7442 op0 = force_not_mem (op0);
7443 preserve_temp_slots (op0);
7444 expand_end_bindings (NULL_TREE, 0, 0);
7446 return op0;
7448 case CALL_EXPR:
7449 /* Check for a built-in function. */
7450 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7451 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7452 == FUNCTION_DECL)
7453 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7455 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7456 == BUILT_IN_FRONTEND)
7457 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7458 else
7459 return expand_builtin (exp, target, subtarget, tmode, ignore);
7462 return expand_call (exp, target, ignore);
7464 case NON_LVALUE_EXPR:
7465 case NOP_EXPR:
7466 case CONVERT_EXPR:
7467 case REFERENCE_EXPR:
7468 if (TREE_OPERAND (exp, 0) == error_mark_node)
7469 return const0_rtx;
7471 if (TREE_CODE (type) == UNION_TYPE)
7473 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7475 /* If both input and output are BLKmode, this conversion
7476 isn't actually doing anything unless we need to make the
7477 alignment stricter. */
7478 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7479 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7480 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7481 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7482 modifier);
7484 if (target == 0)
7485 target = assign_temp (type, 0, 1, 1);
7487 if (GET_CODE (target) == MEM)
7488 /* Store data into beginning of memory target. */
7489 store_expr (TREE_OPERAND (exp, 0),
7490 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7492 else if (GET_CODE (target) == REG)
7493 /* Store this field into a union of the proper type. */
7494 store_field (target,
7495 MIN ((int_size_in_bytes (TREE_TYPE
7496 (TREE_OPERAND (exp, 0)))
7497 * BITS_PER_UNIT),
7498 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7499 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7500 VOIDmode, 0, BITS_PER_UNIT,
7501 int_size_in_bytes (type), 0);
7502 else
7503 abort ();
7505 /* Return the entire union. */
7506 return target;
7509 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7511 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7512 ro_modifier);
7514 /* If the signedness of the conversion differs and OP0 is
7515 a promoted SUBREG, clear that indication since we now
7516 have to do the proper extension. */
7517 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7518 && GET_CODE (op0) == SUBREG)
7519 SUBREG_PROMOTED_VAR_P (op0) = 0;
7521 return op0;
7524 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7525 if (GET_MODE (op0) == mode)
7526 return op0;
7528 /* If OP0 is a constant, just convert it into the proper mode. */
7529 if (CONSTANT_P (op0))
7530 return
7531 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7532 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7534 if (modifier == EXPAND_INITIALIZER)
7535 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7537 if (target == 0)
7538 return
7539 convert_to_mode (mode, op0,
7540 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7541 else
7542 convert_move (target, op0,
7543 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7544 return target;
7546 case PLUS_EXPR:
7547 /* We come here from MINUS_EXPR when the second operand is a
7548 constant. */
7549 plus_expr:
7550 this_optab = ! unsignedp && flag_trapv
7551 && (GET_MODE_CLASS(mode) == MODE_INT)
7552 ? addv_optab : add_optab;
7554 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7555 something else, make sure we add the register to the constant and
7556 then to the other thing. This case can occur during strength
7557 reduction and doing it this way will produce better code if the
7558 frame pointer or argument pointer is eliminated.
7560 fold-const.c will ensure that the constant is always in the inner
7561 PLUS_EXPR, so the only case we need to do anything about is if
7562 sp, ap, or fp is our second argument, in which case we must swap
7563 the innermost first argument and our second argument. */
7565 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7566 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7567 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7568 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7569 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7570 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7572 tree t = TREE_OPERAND (exp, 1);
7574 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7575 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7578 /* If the result is to be ptr_mode and we are adding an integer to
7579 something, we might be forming a constant. So try to use
7580 plus_constant. If it produces a sum and we can't accept it,
7581 use force_operand. This allows P = &ARR[const] to generate
7582 efficient code on machines where a SYMBOL_REF is not a valid
7583 address.
7585 If this is an EXPAND_SUM call, always return the sum. */
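/* A source-level sketch of the case this is aimed at (ARR and P are
   hypothetical names, not anything in this file):

       static int arr[16];
       int *p = &arr[5];

   The PLUS_EXPR adds a constant byte offset to the address of ARR, and
   using plus_constant lets the initializer be emitted as a single
   (symbol_ref + constant) instead of an explicit add.  */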
7586 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7587 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7589 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7590 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7591 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7593 rtx constant_part;
7595 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7596 EXPAND_SUM);
7597 /* Use immed_double_const to ensure that the constant is
7598 truncated according to the mode of OP1, then sign extended
7599 to a HOST_WIDE_INT. Using the constant directly can result
7600 in non-canonical RTL in a 64x32 cross compile. */
7601 constant_part
7602 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7603 (HOST_WIDE_INT) 0,
7604 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7605 op1 = plus_constant (op1, INTVAL (constant_part));
7606 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7607 op1 = force_operand (op1, target);
7608 return op1;
7611 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7612 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7613 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7615 rtx constant_part;
7617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7618 EXPAND_SUM);
7619 if (! CONSTANT_P (op0))
7621 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7622 VOIDmode, modifier);
7623 /* Don't go to both_summands if modifier
7624 says it's not right to return a PLUS. */
7625 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7626 goto binop2;
7627 goto both_summands;
7629 /* Use immed_double_const to ensure that the constant is
7630 truncated according to the mode of OP0, then sign extended
7631 to a HOST_WIDE_INT. Using the constant directly can result
7632 in non-canonical RTL in a 64x32 cross compile. */
7633 constant_part
7634 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7635 (HOST_WIDE_INT) 0,
7636 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7637 op0 = plus_constant (op0, INTVAL (constant_part));
7638 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7639 op0 = force_operand (op0, target);
7640 return op0;
7644 /* No sense saving up arithmetic to be done
7645 if it's all in the wrong mode to form part of an address.
7646 And force_operand won't know whether to sign-extend or
7647 zero-extend. */
7648 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7649 || mode != ptr_mode)
7650 goto binop;
7652 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7653 subtarget = 0;
7655 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7656 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7658 both_summands:
7659 /* Make sure any term that's a sum with a constant comes last. */
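/* For instance, if OP0 is (plus R1 (const_int 4)) and OP1 is a plain
   register R2, the two are swapped so the constant-carrying sum ends up
   in OP1 for the association step below.  */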
7660 if (GET_CODE (op0) == PLUS
7661 && CONSTANT_P (XEXP (op0, 1)))
7663 temp = op0;
7664 op0 = op1;
7665 op1 = temp;
7667 /* If adding to a sum including a constant,
7668 associate it to put the constant outside. */
7669 if (GET_CODE (op1) == PLUS
7670 && CONSTANT_P (XEXP (op1, 1)))
7672 rtx constant_term = const0_rtx;
7674 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7675 if (temp != 0)
7676 op0 = temp;
7677 /* Ensure that MULT comes first if there is one. */
7678 else if (GET_CODE (op0) == MULT)
7679 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7680 else
7681 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7683 /* Let's also eliminate constants from op0 if possible. */
7684 op0 = eliminate_constant_term (op0, &constant_term);
7686 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7687 their sum should be a constant. Form it into OP1, since the
7688 result we want will then be OP0 + OP1. */
7690 temp = simplify_binary_operation (PLUS, mode, constant_term,
7691 XEXP (op1, 1));
7692 if (temp != 0)
7693 op1 = temp;
7694 else
7695 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7698 /* Put a constant term last and put a multiplication first. */
7699 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7700 temp = op1, op1 = op0, op0 = temp;
7702 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7703 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7705 case MINUS_EXPR:
7706 /* For initializers, we are allowed to return a MINUS of two
7707 symbolic constants. Here we handle all cases when both operands
7708 are constant. */
7709 /* Handle difference of two symbolic constants,
7710 for the sake of an initializer. */
7711 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7712 && really_constant_p (TREE_OPERAND (exp, 0))
7713 && really_constant_p (TREE_OPERAND (exp, 1)))
7715 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7716 VOIDmode, ro_modifier);
7717 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7718 VOIDmode, ro_modifier);
7720 /* If the last operand is a CONST_INT, use plus_constant of
7721 the negated constant. Else make the MINUS. */
7722 if (GET_CODE (op1) == CONST_INT)
7723 return plus_constant (op0, - INTVAL (op1));
7724 else
7725 return gen_rtx_MINUS (mode, op0, op1);
7727 /* Convert A - const to A + (-const). */
7728 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7730 tree negated = fold (build1 (NEGATE_EXPR, type,
7731 TREE_OPERAND (exp, 1)));
7733 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7734 /* If we can't negate the constant in TYPE, leave it alone and
7735 expand_binop will negate it for us. We used to try to do it
7736 here in the signed version of TYPE, but that doesn't work
7737 on POINTER_TYPEs. */;
7738 else
7740 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7741 goto plus_expr;
7744 this_optab = ! unsignedp && flag_trapv
7745 && (GET_MODE_CLASS(mode) == MODE_INT)
7746 ? subv_optab : sub_optab;
7747 goto binop;
7749 case MULT_EXPR:
7750 /* If first operand is constant, swap them.
7751 Thus the following special case checks need only
7752 check the second operand. */
7753 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7755 tree t1 = TREE_OPERAND (exp, 0);
7756 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7757 TREE_OPERAND (exp, 1) = t1;
7760 /* Attempt to return something suitable for generating an
7761 indexed address, for machines that support that. */
7763 if (modifier == EXPAND_SUM && mode == ptr_mode
7764 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7765 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7767 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7768 EXPAND_SUM);
7770 /* Apply distributive law if OP0 is x+c. */
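/* For example, (x + 4) * 8 becomes (x * 8) + 32 here, which is the shape
   an indexed address wants: a scaled term plus a constant offset.  */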
7771 if (GET_CODE (op0) == PLUS
7772 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7773 return
7774 gen_rtx_PLUS
7775 (mode,
7776 gen_rtx_MULT
7777 (mode, XEXP (op0, 0),
7778 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7779 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7780 * INTVAL (XEXP (op0, 1))));
7782 if (GET_CODE (op0) != REG)
7783 op0 = force_operand (op0, NULL_RTX);
7784 if (GET_CODE (op0) != REG)
7785 op0 = copy_to_mode_reg (mode, op0);
7787 return
7788 gen_rtx_MULT (mode, op0,
7789 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7792 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7793 subtarget = 0;
7795 /* Check for multiplying things that have been extended
7796 from a narrower type. If this machine supports multiplying
7797 in that narrower type with a result in the desired type,
7798 do it that way, and avoid the explicit type-conversion. */
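/* A sketch of when this fires (assuming 32-bit int, 64-bit long long,
   and a target that provides a widening multiply pattern such as
   mulsidi3):

       long long prod (int a, int b)
       {
         return (long long) a * (long long) b;
       }

   Both operands are NOP_EXPRs from the narrower int type, so the
   multiply can be done with SImode inputs and a DImode result.  */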
7799 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7800 && TREE_CODE (type) == INTEGER_TYPE
7801 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7802 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7803 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7804 && int_fits_type_p (TREE_OPERAND (exp, 1),
7805 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7806 /* Don't use a widening multiply if a shift will do. */
7807 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7808 > HOST_BITS_PER_WIDE_INT)
7809 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7811 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7812 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7814 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7815 /* If both operands are extended, they must either both
7816 be zero-extended or both be sign-extended. */
7817 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7819 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7821 enum machine_mode innermode
7822 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7823 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7824 ? smul_widen_optab : umul_widen_optab);
7825 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7826 ? umul_widen_optab : smul_widen_optab);
7827 if (mode == GET_MODE_WIDER_MODE (innermode))
7829 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7831 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7832 NULL_RTX, VOIDmode, 0);
7833 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7834 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7835 VOIDmode, 0);
7836 else
7837 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7838 NULL_RTX, VOIDmode, 0);
7839 goto binop2;
7841 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7842 && innermode == word_mode)
7844 rtx htem;
7845 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7846 NULL_RTX, VOIDmode, 0);
7847 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7848 op1 = convert_modes (innermode, mode,
7849 expand_expr (TREE_OPERAND (exp, 1),
7850 NULL_RTX, VOIDmode, 0),
7851 unsignedp);
7852 else
7853 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7854 NULL_RTX, VOIDmode, 0);
7855 temp = expand_binop (mode, other_optab, op0, op1, target,
7856 unsignedp, OPTAB_LIB_WIDEN);
7857 htem = expand_mult_highpart_adjust (innermode,
7858 gen_highpart (innermode, temp),
7859 op0, op1,
7860 gen_highpart (innermode, temp),
7861 unsignedp);
7862 emit_move_insn (gen_highpart (innermode, temp), htem);
7863 return temp;
7867 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7868 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7869 return expand_mult (mode, op0, op1, target, unsignedp);
7871 case TRUNC_DIV_EXPR:
7872 case FLOOR_DIV_EXPR:
7873 case CEIL_DIV_EXPR:
7874 case ROUND_DIV_EXPR:
7875 case EXACT_DIV_EXPR:
7876 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7877 subtarget = 0;
7878 /* Possible optimization: compute the dividend with EXPAND_SUM
7879 then if the divisor is constant can optimize the case
7880 where some terms of the dividend have coeffs divisible by it. */
7881 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7882 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7883 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7885 case RDIV_EXPR:
7886 /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
7887 saving an expensive divide.  If not, combine will rebuild the
7888 original computation. */
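/* For example, with -funsafe-math-optimizations a loop computing
   x[i] / y is expanded as x[i] * (1.0 / y); later passes may then CSE
   or hoist the reciprocal so the divide happens only once.  */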
7889 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7890 && !real_onep (TREE_OPERAND (exp, 0)))
7891 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7892 build (RDIV_EXPR, type,
7893 build_real (type, dconst1),
7894 TREE_OPERAND (exp, 1))),
7895 target, tmode, unsignedp);
7896 this_optab = sdiv_optab;
7897 goto binop;
7899 case TRUNC_MOD_EXPR:
7900 case FLOOR_MOD_EXPR:
7901 case CEIL_MOD_EXPR:
7902 case ROUND_MOD_EXPR:
7903 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7904 subtarget = 0;
7905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7906 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7907 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7909 case FIX_ROUND_EXPR:
7910 case FIX_FLOOR_EXPR:
7911 case FIX_CEIL_EXPR:
7912 abort (); /* Not used for C. */
7914 case FIX_TRUNC_EXPR:
7915 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7916 if (target == 0)
7917 target = gen_reg_rtx (mode);
7918 expand_fix (target, op0, unsignedp);
7919 return target;
7921 case FLOAT_EXPR:
7922 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7923 if (target == 0)
7924 target = gen_reg_rtx (mode);
7925 /* expand_float can't figure out what to do if FROM has VOIDmode.
7926 So give it the correct mode. With -O, cse will optimize this. */
7927 if (GET_MODE (op0) == VOIDmode)
7928 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7929 op0);
7930 expand_float (target, op0,
7931 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7932 return target;
7934 case NEGATE_EXPR:
7935 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7936 temp = expand_unop (mode,
7937 ! unsignedp && flag_trapv
7938 && (GET_MODE_CLASS(mode) == MODE_INT)
7939 ? negv_optab : neg_optab, op0, target, 0);
7940 if (temp == 0)
7941 abort ();
7942 return temp;
7944 case ABS_EXPR:
7945 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7947 /* Handle complex values specially. */
7948 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7949 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7950 return expand_complex_abs (mode, op0, target, unsignedp);
7952 /* Unsigned abs is simply the operand. Testing here means we don't
7953 risk generating incorrect code below. */
7954 if (TREE_UNSIGNED (type))
7955 return op0;
7957 return expand_abs (mode, op0, target, unsignedp,
7958 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7960 case MAX_EXPR:
7961 case MIN_EXPR:
7962 target = original_target;
7963 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7964 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7965 || GET_MODE (target) != mode
7966 || (GET_CODE (target) == REG
7967 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7968 target = gen_reg_rtx (mode);
7969 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7970 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7972 /* First try to do it with a special MIN or MAX instruction.
7973 If that does not win, use a conditional jump to select the proper
7974 value. */
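/* When no such instruction exists, the fallback below amounts to
   (illustrating MAX_EXPR):

       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:
*/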
7975 this_optab = (TREE_UNSIGNED (type)
7976 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7977 : (code == MIN_EXPR ? smin_optab : smax_optab));
7979 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7980 OPTAB_WIDEN);
7981 if (temp != 0)
7982 return temp;
7984 /* At this point, a MEM target is no longer useful; we will get better
7985 code without it. */
7987 if (GET_CODE (target) == MEM)
7988 target = gen_reg_rtx (mode);
7990 if (target != op0)
7991 emit_move_insn (target, op0);
7993 op0 = gen_label_rtx ();
7995 /* If this mode is an integer too wide to compare properly,
7996 compare word by word. Rely on cse to optimize constant cases. */
7997 if (GET_MODE_CLASS (mode) == MODE_INT
7998 && ! can_compare_p (GE, mode, ccp_jump))
8000 if (code == MAX_EXPR)
8001 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8002 target, op1, NULL_RTX, op0);
8003 else
8004 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8005 op1, target, NULL_RTX, op0);
8007 else
8009 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8010 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8011 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
8012 op0);
8014 emit_move_insn (target, op1);
8015 emit_label (op0);
8016 return target;
8018 case BIT_NOT_EXPR:
8019 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8020 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8021 if (temp == 0)
8022 abort ();
8023 return temp;
8025 case FFS_EXPR:
8026 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8027 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8028 if (temp == 0)
8029 abort ();
8030 return temp;
8032 /* ??? Can optimize bitwise operations with one arg constant.
8033 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8034 and (a bitwise1 b) bitwise2 b (etc)
8035 but that is probably not worth while. */
8037 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8038 boolean values when we want in all cases to compute both of them. In
8039 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8040 as actual zero-or-1 values and then bitwise anding. In cases where
8041 there cannot be any side effects, better code would be made by
8042 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8043 how to recognize those cases. */
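/* For illustration: given two zero-or-one operands P and Q (say the
   already-computed results of comparisons), TRUTH_AND_EXPR (P, Q) is
   expanded exactly like BIT_AND_EXPR (P, Q): one AND instruction, with
   no short-circuit branch.  */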
8045 case TRUTH_AND_EXPR:
8046 case BIT_AND_EXPR:
8047 this_optab = and_optab;
8048 goto binop;
8050 case TRUTH_OR_EXPR:
8051 case BIT_IOR_EXPR:
8052 this_optab = ior_optab;
8053 goto binop;
8055 case TRUTH_XOR_EXPR:
8056 case BIT_XOR_EXPR:
8057 this_optab = xor_optab;
8058 goto binop;
8060 case LSHIFT_EXPR:
8061 case RSHIFT_EXPR:
8062 case LROTATE_EXPR:
8063 case RROTATE_EXPR:
8064 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8065 subtarget = 0;
8066 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8067 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8068 unsignedp);
8070 /* Could determine the answer when only additive constants differ. Also,
8071 the addition of one can be handled by changing the condition. */
8072 case LT_EXPR:
8073 case LE_EXPR:
8074 case GT_EXPR:
8075 case GE_EXPR:
8076 case EQ_EXPR:
8077 case NE_EXPR:
8078 case UNORDERED_EXPR:
8079 case ORDERED_EXPR:
8080 case UNLT_EXPR:
8081 case UNLE_EXPR:
8082 case UNGT_EXPR:
8083 case UNGE_EXPR:
8084 case UNEQ_EXPR:
8085 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8086 if (temp != 0)
8087 return temp;
8089 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8090 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8091 && original_target
8092 && GET_CODE (original_target) == REG
8093 && (GET_MODE (original_target)
8094 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8096 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8097 VOIDmode, 0);
8099 if (temp != original_target)
8100 temp = copy_to_reg (temp);
8102 op1 = gen_label_rtx ();
8103 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8104 GET_MODE (temp), unsignedp, 0, op1);
8105 emit_move_insn (temp, const1_rtx);
8106 emit_label (op1);
8107 return temp;
8110 /* If no set-flag instruction, must generate a conditional
8111 store into a temporary variable. Drop through
8112 and handle this like && and ||. */
8114 case TRUTH_ANDIF_EXPR:
8115 case TRUTH_ORIF_EXPR:
8116 if (! ignore
8117 && (target == 0 || ! safe_from_p (target, exp, 1)
8118 /* Make sure we don't have a hard reg (such as function's return
8119 value) live across basic blocks, if not optimizing. */
8120 || (!optimize && GET_CODE (target) == REG
8121 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8122 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8124 if (target)
8125 emit_clr_insn (target);
8127 op1 = gen_label_rtx ();
8128 jumpifnot (exp, op1);
8130 if (target)
8131 emit_0_to_1_insn (target);
8133 emit_label (op1);
8134 return ignore ? const0_rtx : target;
8136 case TRUTH_NOT_EXPR:
8137 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8138 /* The parser is careful to generate TRUTH_NOT_EXPR
8139 only with operands that are always zero or one. */
8140 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8141 target, 1, OPTAB_LIB_WIDEN);
8142 if (temp == 0)
8143 abort ();
8144 return temp;
8146 case COMPOUND_EXPR:
8147 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8148 emit_queue ();
8149 return expand_expr (TREE_OPERAND (exp, 1),
8150 (ignore ? const0_rtx : target),
8151 VOIDmode, 0);
8153 case COND_EXPR:
8154 /* If we would have a "singleton" (see below) were it not for a
8155 conversion in each arm, bring that conversion back out. */
8156 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8157 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8158 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8159 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8161 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8162 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8164 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8165 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8166 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8167 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8168 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8169 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8170 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8171 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8172 return expand_expr (build1 (NOP_EXPR, type,
8173 build (COND_EXPR, TREE_TYPE (iftrue),
8174 TREE_OPERAND (exp, 0),
8175 iftrue, iffalse)),
8176 target, tmode, modifier);
8180 /* Note that COND_EXPRs whose type is a structure or union
8181 are required to be constructed to contain assignments of
8182 a temporary variable, so that we can evaluate them here
8183 for side effect only. If type is void, we must do likewise. */
8185 /* If an arm of the branch requires a cleanup,
8186 only that cleanup is performed. */
8188 tree singleton = 0;
8189 tree binary_op = 0, unary_op = 0;
8191 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8192 convert it to our mode, if necessary. */
8193 if (integer_onep (TREE_OPERAND (exp, 1))
8194 && integer_zerop (TREE_OPERAND (exp, 2))
8195 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8197 if (ignore)
8199 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8200 ro_modifier);
8201 return const0_rtx;
8204 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8205 if (GET_MODE (op0) == mode)
8206 return op0;
8208 if (target == 0)
8209 target = gen_reg_rtx (mode);
8210 convert_move (target, op0, unsignedp);
8211 return target;
8214 /* Check for X ? A + B : A. If we have this, we can copy A to the
8215 output and conditionally add B. Similarly for unary operations.
8216 Don't do this if X has side-effects because those side effects
8217 might affect A or B and the "?" operation is a sequence point in
8218 ANSI. (operand_equal_p tests for side effects.) */
8220 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8221 && operand_equal_p (TREE_OPERAND (exp, 2),
8222 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8223 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8224 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8225 && operand_equal_p (TREE_OPERAND (exp, 1),
8226 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8227 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8228 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8229 && operand_equal_p (TREE_OPERAND (exp, 2),
8230 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8231 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8232 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8233 && operand_equal_p (TREE_OPERAND (exp, 1),
8234 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8235 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8237 /* If we are not to produce a result, we have no target. Otherwise,
8238 if a target was specified use it; it will not be used as an
8239 intermediate target unless it is safe. If no target, use a
8240 temporary. */
8242 if (ignore)
8243 temp = 0;
8244 else if (original_target
8245 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8246 || (singleton && GET_CODE (original_target) == REG
8247 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8248 && original_target == var_rtx (singleton)))
8249 && GET_MODE (original_target) == mode
8250 #ifdef HAVE_conditional_move
8251 && (! can_conditionally_move_p (mode)
8252 || GET_CODE (original_target) == REG
8253 || TREE_ADDRESSABLE (type))
8254 #endif
8255 && (GET_CODE (original_target) != MEM
8256 || TREE_ADDRESSABLE (type)))
8257 temp = original_target;
8258 else if (TREE_ADDRESSABLE (type))
8259 abort ();
8260 else
8261 temp = assign_temp (type, 0, 0, 1);
8263 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8264 do the test of X as a store-flag operation, do this as
8265 A + ((X != 0) << log C). Similarly for other simple binary
8266 operators. Only do for C == 1 if BRANCH_COST is low. */
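/* For example (with the condition a comparison and C == 4):

       r = (i > 0) ? a + 4 : a;

   can be emitted as  r = a + ((i > 0) << 2),  avoiding a branch when
   the comparison can be done as a store-flag operation.  */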
8267 if (temp && singleton && binary_op
8268 && (TREE_CODE (binary_op) == PLUS_EXPR
8269 || TREE_CODE (binary_op) == MINUS_EXPR
8270 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8271 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8272 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8273 : integer_onep (TREE_OPERAND (binary_op, 1)))
8274 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8276 rtx result;
8277 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8278 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8279 ? addv_optab : add_optab)
8280 : TREE_CODE (binary_op) == MINUS_EXPR
8281 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8282 ? subv_optab : sub_optab)
8283 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8284 : xor_optab);
8286 /* If we had X ? A : A + 1, do this as A + (X == 0).
8288 We have to invert the truth value here and then put it
8289 back later if do_store_flag fails. We cannot simply copy
8290 TREE_OPERAND (exp, 0) to another variable and modify that
8291 because invert_truthvalue can modify the tree pointed to
8292 by its argument. */
8293 if (singleton == TREE_OPERAND (exp, 1))
8294 TREE_OPERAND (exp, 0)
8295 = invert_truthvalue (TREE_OPERAND (exp, 0));
8297 result = do_store_flag (TREE_OPERAND (exp, 0),
8298 (safe_from_p (temp, singleton, 1)
8299 ? temp : NULL_RTX),
8300 mode, BRANCH_COST <= 1);
8302 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8303 result = expand_shift (LSHIFT_EXPR, mode, result,
8304 build_int_2 (tree_log2
8305 (TREE_OPERAND
8306 (binary_op, 1)),
8308 (safe_from_p (temp, singleton, 1)
8309 ? temp : NULL_RTX), 0);
8311 if (result)
8313 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8314 return expand_binop (mode, boptab, op1, result, temp,
8315 unsignedp, OPTAB_LIB_WIDEN);
8317 else if (singleton == TREE_OPERAND (exp, 1))
8318 TREE_OPERAND (exp, 0)
8319 = invert_truthvalue (TREE_OPERAND (exp, 0));
8322 do_pending_stack_adjust ();
8323 NO_DEFER_POP;
8324 op0 = gen_label_rtx ();
8326 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8328 if (temp != 0)
8330 /* If the target conflicts with the other operand of the
8331 binary op, we can't use it. Also, we can't use the target
8332 if it is a hard register, because evaluating the condition
8333 might clobber it. */
8334 if ((binary_op
8335 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8336 || (GET_CODE (temp) == REG
8337 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8338 temp = gen_reg_rtx (mode);
8339 store_expr (singleton, temp, 0);
8341 else
8342 expand_expr (singleton,
8343 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8344 if (singleton == TREE_OPERAND (exp, 1))
8345 jumpif (TREE_OPERAND (exp, 0), op0);
8346 else
8347 jumpifnot (TREE_OPERAND (exp, 0), op0);
8349 start_cleanup_deferral ();
8350 if (binary_op && temp == 0)
8351 /* Just touch the other operand. */
8352 expand_expr (TREE_OPERAND (binary_op, 1),
8353 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8354 else if (binary_op)
8355 store_expr (build (TREE_CODE (binary_op), type,
8356 make_tree (type, temp),
8357 TREE_OPERAND (binary_op, 1)),
8358 temp, 0);
8359 else
8360 store_expr (build1 (TREE_CODE (unary_op), type,
8361 make_tree (type, temp)),
8362 temp, 0);
8363 op1 = op0;
8365 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8366 comparison operator. If we have one of these cases, set the
8367 output to A, branch on A (cse will merge these two references),
8368 then set the output to FOO. */
8369 else if (temp
8370 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8371 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8372 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8373 TREE_OPERAND (exp, 1), 0)
8374 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8375 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8376 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8378 if (GET_CODE (temp) == REG
8379 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8380 temp = gen_reg_rtx (mode);
8381 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8382 jumpif (TREE_OPERAND (exp, 0), op0);
8384 start_cleanup_deferral ();
8385 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8386 op1 = op0;
8388 else if (temp
8389 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8390 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8391 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8392 TREE_OPERAND (exp, 2), 0)
8393 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8394 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8395 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8397 if (GET_CODE (temp) == REG
8398 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8399 temp = gen_reg_rtx (mode);
8400 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8401 jumpifnot (TREE_OPERAND (exp, 0), op0);
8403 start_cleanup_deferral ();
8404 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8405 op1 = op0;
8407 else
8409 op1 = gen_label_rtx ();
8410 jumpifnot (TREE_OPERAND (exp, 0), op0);
8412 start_cleanup_deferral ();
8414 /* One branch of the cond can be void if it never returns. For
8415 example: A ? throw : E. */
8416 if (temp != 0
8417 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8418 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8419 else
8420 expand_expr (TREE_OPERAND (exp, 1),
8421 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8422 end_cleanup_deferral ();
8423 emit_queue ();
8424 emit_jump_insn (gen_jump (op1));
8425 emit_barrier ();
8426 emit_label (op0);
8427 start_cleanup_deferral ();
8428 if (temp != 0
8429 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8430 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8431 else
8432 expand_expr (TREE_OPERAND (exp, 2),
8433 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8436 end_cleanup_deferral ();
8438 emit_queue ();
8439 emit_label (op1);
8440 OK_DEFER_POP;
8442 return temp;
8445 case TARGET_EXPR:
8447 /* Something needs to be initialized, but we didn't know
8448 where that thing was when building the tree. For example,
8449 it could be the return value of a function, or a parameter
8450 to a function which lays down in the stack, or a temporary
8451 variable which must be passed by reference.
8453 We guarantee that the expression will either be constructed
8454 or copied into our original target. */
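/* A typical source of a TARGET_EXPR (for illustration; the details
   depend on the language front end) is a C++ call returning a class
   object by value:

       struct S { int a, b; };
       struct S f (void);
       struct S s = f ();

   The temporary that receives the return value is the "slot"
   initialized below.  */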
8456 tree slot = TREE_OPERAND (exp, 0);
8457 tree cleanups = NULL_TREE;
8458 tree exp1;
8460 if (TREE_CODE (slot) != VAR_DECL)
8461 abort ();
8463 if (! ignore)
8464 target = original_target;
8466 /* Set this here so that if we get a target that refers to a
8467 register variable that's already been used, put_reg_into_stack
8468 knows that it should fix up those uses. */
8469 TREE_USED (slot) = 1;
8471 if (target == 0)
8473 if (DECL_RTL_SET_P (slot))
8475 target = DECL_RTL (slot);
8476 /* If we have already expanded the slot, don't do
8477 it again. (mrs) */
8478 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8479 return target;
8481 else
8483 target = assign_temp (type, 2, 0, 1);
8484 /* All temp slots at this level must not conflict. */
8485 preserve_temp_slots (target);
8486 SET_DECL_RTL (slot, target);
8487 if (TREE_ADDRESSABLE (slot))
8488 put_var_into_stack (slot);
8490 /* Since SLOT is not known to the called function
8491 to belong to its stack frame, we must build an explicit
8492 cleanup. This case occurs when we must build up a reference
8493 to pass the reference as an argument. In this case,
8494 it is very likely that such a reference need not be
8495 built here. */
8497 if (TREE_OPERAND (exp, 2) == 0)
8498 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8499 cleanups = TREE_OPERAND (exp, 2);
8502 else
8504 /* This case does occur when expanding a parameter which
8505 needs to be constructed on the stack. The target
8506 is the actual stack address that we want to initialize.
8507 The function we call will perform the cleanup in this case. */
8509 /* If we have already assigned it space, use that space,
8510 not target that we were passed in, as our target
8511 parameter is only a hint. */
8512 if (DECL_RTL_SET_P (slot))
8514 target = DECL_RTL (slot);
8515 /* If we have already expanded the slot, don't do
8516 it again. (mrs) */
8517 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8518 return target;
8520 else
8522 SET_DECL_RTL (slot, target);
8523 /* If we must have an addressable slot, then make sure that
8524 the RTL that we just stored in slot is OK. */
8525 if (TREE_ADDRESSABLE (slot))
8526 put_var_into_stack (slot);
8530 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8531 /* Mark it as expanded. */
8532 TREE_OPERAND (exp, 1) = NULL_TREE;
8534 store_expr (exp1, target, 0);
8536 expand_decl_cleanup (NULL_TREE, cleanups);
8538 return target;
8541 case INIT_EXPR:
8543 tree lhs = TREE_OPERAND (exp, 0);
8544 tree rhs = TREE_OPERAND (exp, 1);
8546 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8547 return temp;
8550 case MODIFY_EXPR:
8552 /* If lhs is complex, expand calls in rhs before computing it.
8553 That's so we don't compute a pointer and save it over a
8554 call. If lhs is simple, compute it first so we can give it
8555 as a target if the rhs is just a call. This avoids an
8556 extra temp and copy and that prevents a partial-subsumption
8557 which makes bad code. Actually we could treat
8558 component_ref's of vars like vars. */
8560 tree lhs = TREE_OPERAND (exp, 0);
8561 tree rhs = TREE_OPERAND (exp, 1);
8563 temp = 0;
8565 /* Check for |= or &= of a bitfield of size one into another bitfield
8566 of size 1. In this case, (unless we need the result of the
8567 assignment) we can do this more efficiently with a
8568 test followed by an assignment, if necessary.
8570 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8571 things change so we do, this code should be enhanced to
8572 support it. */
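/* For example (with a hypothetical struct of one-bit fields):

       struct { unsigned a : 1, b : 1; } s, t;
       s.a |= t.b;

   Rather than extracting both bits, OR-ing them and re-inserting the
   result, the code below just tests t.b and, only if it is set,
   stores 1 into s.a.  */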
8573 if (ignore
8574 && TREE_CODE (lhs) == COMPONENT_REF
8575 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8576 || TREE_CODE (rhs) == BIT_AND_EXPR)
8577 && TREE_OPERAND (rhs, 0) == lhs
8578 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8579 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8580 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8582 rtx label = gen_label_rtx ();
8584 do_jump (TREE_OPERAND (rhs, 1),
8585 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8586 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8587 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8588 (TREE_CODE (rhs) == BIT_IOR_EXPR
8589 ? integer_one_node
8590 : integer_zero_node)),
8591 0, 0);
8592 do_pending_stack_adjust ();
8593 emit_label (label);
8594 return const0_rtx;
8597 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8599 return temp;
8602 case RETURN_EXPR:
8603 if (!TREE_OPERAND (exp, 0))
8604 expand_null_return ();
8605 else
8606 expand_return (TREE_OPERAND (exp, 0));
8607 return const0_rtx;
8609 case PREINCREMENT_EXPR:
8610 case PREDECREMENT_EXPR:
8611 return expand_increment (exp, 0, ignore);
8613 case POSTINCREMENT_EXPR:
8614 case POSTDECREMENT_EXPR:
8615 /* Faster to treat as pre-increment if result is not used. */
8616 return expand_increment (exp, ! ignore, ignore);
8618 case ADDR_EXPR:
8619 /* If nonzero, TEMP will be set to the address of something that might
8620 be a MEM corresponding to a stack slot. */
8621 temp = 0;
8623 /* Are we taking the address of a nested function? */
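/* (This is the GNU C nested-function extension: taking the address of a
   nested function that needs its enclosing frame requires materializing
   a small run-time "trampoline" on the stack which loads the static
   chain and then jumps to the real function; trampoline_address returns
   the address of that stub.)  */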
8624 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8625 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8626 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8627 && ! TREE_STATIC (exp))
8629 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8630 op0 = force_operand (op0, target);
8632 /* If we are taking the address of something erroneous, just
8633 return a zero. */
8634 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8635 return const0_rtx;
8636 else
8638 /* We make sure to pass const0_rtx down if we came in with
8639 ignore set, to avoid doing the cleanups twice for something. */
8640 op0 = expand_expr (TREE_OPERAND (exp, 0),
8641 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8642 (modifier == EXPAND_INITIALIZER
8643 ? modifier : EXPAND_CONST_ADDRESS));
8645 /* If we are going to ignore the result, OP0 will have been set
8646 to const0_rtx, so just return it. Don't get confused and
8647 think we are taking the address of the constant. */
8648 if (ignore)
8649 return op0;
8651 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8652 clever and returns a REG when given a MEM. */
8653 op0 = protect_from_queue (op0, 1);
8655 /* We would like the object in memory. If it is a constant, we can
8656 have it be statically allocated into memory. For a non-constant,
8657 we need to allocate some memory and store the value into it. */
8659 if (CONSTANT_P (op0))
8660 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8661 op0);
8662 else if (GET_CODE (op0) == MEM)
8664 mark_temp_addr_taken (op0);
8665 temp = XEXP (op0, 0);
8668 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8669 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8670 || GET_CODE (op0) == PARALLEL)
8672 /* If this object is in a register, it must not
8673 be BLKmode. */
8674 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8675 tree nt = build_qualified_type (inner_type,
8676 (TYPE_QUALS (inner_type)
8677 | TYPE_QUAL_CONST));
8678 rtx memloc = assign_temp (nt, 1, 1, 1);
8680 mark_temp_addr_taken (memloc);
8681 if (GET_CODE (op0) == PARALLEL)
8682 /* Handle calls that pass values in multiple non-contiguous
8683 locations. The Irix 6 ABI has examples of this. */
8684 emit_group_store (memloc, op0,
8685 int_size_in_bytes (inner_type),
8686 TYPE_ALIGN (inner_type));
8687 else
8688 emit_move_insn (memloc, op0);
8689 op0 = memloc;
8692 if (GET_CODE (op0) != MEM)
8693 abort ();
8695 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8697 temp = XEXP (op0, 0);
8698 #ifdef POINTERS_EXTEND_UNSIGNED
8699 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8700 && mode == ptr_mode)
8701 temp = convert_memory_address (ptr_mode, temp);
8702 #endif
8703 return temp;
8706 op0 = force_operand (XEXP (op0, 0), target);
8709 if (flag_force_addr && GET_CODE (op0) != REG)
8710 op0 = force_reg (Pmode, op0);
8712 if (GET_CODE (op0) == REG
8713 && ! REG_USERVAR_P (op0))
8714 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8716 /* If we might have had a temp slot, add an equivalent address
8717 for it. */
8718 if (temp != 0)
8719 update_temp_slot_address (temp, op0);
8721 #ifdef POINTERS_EXTEND_UNSIGNED
8722 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8723 && mode == ptr_mode)
8724 op0 = convert_memory_address (ptr_mode, op0);
8725 #endif
8727 return op0;
8729 case ENTRY_VALUE_EXPR:
8730 abort ();
8732 /* COMPLEX type for Extended Pascal & Fortran */
8733 case COMPLEX_EXPR:
8735 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8736 rtx insns;
8738 /* Get the rtx code of the operands. */
8739 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8740 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8742 if (! target)
8743 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8745 start_sequence ();
8747 /* Move the real (op0) and imaginary (op1) parts to their location. */
8748 emit_move_insn (gen_realpart (mode, target), op0);
8749 emit_move_insn (gen_imagpart (mode, target), op1);
8751 insns = get_insns ();
8752 end_sequence ();
8754 /* Complex construction should appear as a single unit. */
8755 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8756 each with a separate pseudo as destination.
8757 It's not correct for flow to treat them as a unit. */
8758 if (GET_CODE (target) != CONCAT)
8759 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8760 else
8761 emit_insns (insns);
8763 return target;
8766 case REALPART_EXPR:
8767 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8768 return gen_realpart (mode, op0);
8770 case IMAGPART_EXPR:
8771 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8772 return gen_imagpart (mode, op0);
8774 case CONJ_EXPR:
8776 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8777 rtx imag_t;
8778 rtx insns;
8780 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8782 if (! target)
8783 target = gen_reg_rtx (mode);
8785 start_sequence ();
8787 /* Store the realpart and the negated imagpart to target. */
8788 emit_move_insn (gen_realpart (partmode, target),
8789 gen_realpart (partmode, op0));
8791 imag_t = gen_imagpart (partmode, target);
8792 temp = expand_unop (partmode,
8793 ! unsignedp && flag_trapv
8794 && (GET_MODE_CLASS(partmode) == MODE_INT)
8795 ? negv_optab : neg_optab,
8796 gen_imagpart (partmode, op0), imag_t, 0);
8797 if (temp != imag_t)
8798 emit_move_insn (imag_t, temp);
8800 insns = get_insns ();
8801 end_sequence ();
8803 /* Conjugate should appear as a single unit.
8804 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8805 each with a separate pseudo as destination.
8806 It's not correct for flow to treat them as a unit. */
8807 if (GET_CODE (target) != CONCAT)
8808 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8809 else
8810 emit_insns (insns);
8812 return target;
8815 case TRY_CATCH_EXPR:
8817 tree handler = TREE_OPERAND (exp, 1);
8819 expand_eh_region_start ();
8821 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8823 expand_eh_region_end_cleanup (handler);
8825 return op0;
8828 case TRY_FINALLY_EXPR:
8830 tree try_block = TREE_OPERAND (exp, 0);
8831 tree finally_block = TREE_OPERAND (exp, 1);
8832 rtx finally_label = gen_label_rtx ();
8833 rtx done_label = gen_label_rtx ();
8834 rtx return_link = gen_reg_rtx (Pmode);
8835 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8836 (tree) finally_label, (tree) return_link);
8837 TREE_SIDE_EFFECTS (cleanup) = 1;
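/* Rough shape of the code emitted below; the cleanup, when run by
   expand_end_bindings, "calls" the finally block as a subroutine
   through RETURN_LINK:

       <try_block>
       return_link = &&resume; goto finally_label;
     resume:
       goto done_label;
     finally_label:
       <finally_block>
       goto *return_link;
     done_label:
*/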
8839 /* Start a new binding layer that will keep track of all cleanup
8840 actions to be performed. */
8841 expand_start_bindings (2);
8843 target_temp_slot_level = temp_slot_level;
8845 expand_decl_cleanup (NULL_TREE, cleanup);
8846 op0 = expand_expr (try_block, target, tmode, modifier);
8848 preserve_temp_slots (op0);
8849 expand_end_bindings (NULL_TREE, 0, 0);
8850 emit_jump (done_label);
8851 emit_label (finally_label);
8852 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8853 emit_indirect_jump (return_link);
8854 emit_label (done_label);
8855 return op0;
8858 case GOTO_SUBROUTINE_EXPR:
8860 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8861 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8862 rtx return_address = gen_label_rtx ();
8863 emit_move_insn (return_link,
8864 gen_rtx_LABEL_REF (Pmode, return_address));
8865 emit_jump (subr);
8866 emit_label (return_address);
8867 return const0_rtx;
8870 case VA_ARG_EXPR:
8871 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8873 case EXC_PTR_EXPR:
8874 return get_exception_pointer (cfun);
8876 case FDESC_EXPR:
8877 /* Function descriptors are not valid except for as
8878 initialization constants, and should not be expanded. */
8879 abort ();
8881 default:
8882 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8885 /* Here to do an ordinary binary operator, generating an instruction
8886 from the optab already placed in `this_optab'. */
8887 binop:
8888 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8889 subtarget = 0;
8890 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8891 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8892 binop2:
8893 temp = expand_binop (mode, this_optab, op0, op1, target,
8894 unsignedp, OPTAB_LIB_WIDEN);
8895 if (temp == 0)
8896 abort ();
8897 return temp;
8900 /* Similar to expand_expr, except that we don't specify a target, target
8901 mode, or modifier and we return the alignment of the inner type. This is
8902 used in cases where it is not necessary to align the result to the
8903 alignment of its type as long as we know the alignment of the result, for
8904 example for comparisons of BLKmode values. */
8906 static rtx
8907 expand_expr_unaligned (exp, palign)
8908 tree exp;
8909 unsigned int *palign;
8911 rtx op0;
8912 tree type = TREE_TYPE (exp);
8913 enum machine_mode mode = TYPE_MODE (type);
8915 /* Default the alignment we return to that of the type. */
8916 *palign = TYPE_ALIGN (type);
8918 /* The only cases in which we do anything special is if the resulting mode
8919 is BLKmode. */
8920 if (mode != BLKmode)
8921 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8923 switch (TREE_CODE (exp))
8925 case CONVERT_EXPR:
8926 case NOP_EXPR:
8927 case NON_LVALUE_EXPR:
8928 /* Conversions between BLKmode values don't change the underlying
8929 alignment or value. */
8930 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8931 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8932 break;
8934 case ARRAY_REF:
8935 /* Much of the code for this case is copied directly from expand_expr.
8936 We need to duplicate it here because we will do something different
8937 in the fall-through case, so we need to handle the same exceptions
8938 it does. */
8940 tree array = TREE_OPERAND (exp, 0);
8941 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8942 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8943 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8944 HOST_WIDE_INT i;
8946 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8947 abort ();
8949 /* Optimize the special-case of a zero lower bound.
8951 We convert the low_bound to sizetype to avoid some problems
8952 with constant folding. (E.g. suppose the lower bound is 1,
8953 and its mode is QI. Without the conversion, (ARRAY
8954 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8955 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8957 if (! integer_zerop (low_bound))
8958 index = size_diffop (index, convert (sizetype, low_bound));
8960 /* If this is a constant index into a constant array,
8961 just get the value from the array. Handle both the cases when
8962 we have an explicit constructor and when our operand is a variable
8963 that was declared const. */
8965 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8966 && host_integerp (index, 0)
8967 && 0 > compare_tree_int (index,
8968 list_length (CONSTRUCTOR_ELTS
8969 (TREE_OPERAND (exp, 0)))))
8971 tree elem;
8973 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8974 i = tree_low_cst (index, 0);
8975 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8978 if (elem)
8979 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8982 else if (optimize >= 1
8983 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8984 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8985 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8987 if (TREE_CODE (index) == INTEGER_CST)
8989 tree init = DECL_INITIAL (array);
8991 if (TREE_CODE (init) == CONSTRUCTOR)
8993 tree elem;
8995 for (elem = CONSTRUCTOR_ELTS (init);
8996 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8997 elem = TREE_CHAIN (elem))
9000 if (elem)
9001 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
9002 palign);
9007 /* Fall through. */
9009 case COMPONENT_REF:
9010 case BIT_FIELD_REF:
9011 case ARRAY_RANGE_REF:
9012 /* If the operand is a CONSTRUCTOR, we can just extract the
9013 appropriate field if it is present. Don't do this if we have
9014 already written the data since we want to refer to that copy
9015 and varasm.c assumes that's what we'll do. */
9016 if (TREE_CODE (exp) == COMPONENT_REF
9017 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9018 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9020 tree elt;
9022 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9023 elt = TREE_CHAIN (elt))
9024 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9025 /* Note that unlike the case in expand_expr, we know this is
9026 BLKmode and hence not an integer. */
9027 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9031 enum machine_mode mode1;
9032 HOST_WIDE_INT bitsize, bitpos;
9033 tree offset;
9034 int volatilep = 0;
9035 unsigned int alignment;
9036 int unsignedp;
9037 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9038 &mode1, &unsignedp, &volatilep,
9039 &alignment);
9041 /* If we got back the original object, something is wrong. Perhaps
9042 we are evaluating an expression too early. In any event, don't
9043 infinitely recurse. */
9044 if (tem == exp)
9045 abort ();
9047 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9049 /* If this is a constant, put it into a register if it is a
9050 legitimate constant and OFFSET is 0, and into memory if it isn't. */
9051 if (CONSTANT_P (op0))
9053 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9055 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9056 && offset == 0)
9057 op0 = force_reg (inner_mode, op0);
9058 else
9059 op0 = validize_mem (force_const_mem (inner_mode, op0));
9062 if (offset != 0)
9064 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9066 /* If this object is in a register, put it into memory.
9067 This case can't occur in C, but can in Ada if we have
9068 unchecked conversion of an expression from a scalar type to
9069 an array or record type. */
9070 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9071 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9073 tree nt = build_qualified_type (TREE_TYPE (tem),
9074 (TYPE_QUALS (TREE_TYPE (tem))
9075 | TYPE_QUAL_CONST));
9076 rtx memloc = assign_temp (nt, 1, 1, 1);
9078 mark_temp_addr_taken (memloc);
9079 emit_move_insn (memloc, op0);
9080 op0 = memloc;
9083 if (GET_CODE (op0) != MEM)
9084 abort ();
9086 if (GET_MODE (offset_rtx) != ptr_mode)
9087 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9089 #ifdef POINTERS_EXTEND_UNSIGNED
9090 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9091 #endif
9093 op0 = offset_address (op0, offset_rtx,
9094 highest_pow2_factor (offset));
9097 /* Don't forget about volatility even if this is a bitfield. */
9098 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9100 op0 = copy_rtx (op0);
9101 MEM_VOLATILE_P (op0) = 1;
9104 /* Check the access. */
9105 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9107 rtx to;
9108 int size;
9110 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9111 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9113 /* Check the access right of the pointer. */
9114 in_check_memory_usage = 1;
9115 if (size > BITS_PER_UNIT)
9116 emit_library_call (chkr_check_addr_libfunc,
9117 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9118 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9119 TYPE_MODE (sizetype),
9120 GEN_INT (MEMORY_USE_RO),
9121 TYPE_MODE (integer_type_node));
9122 in_check_memory_usage = 0;
9125 /* In cases where an aligned union has an unaligned object
9126 as a field, we might be extracting a BLKmode value from
9127 an integer-mode (e.g., SImode) object. Handle this case
9128 by doing the extract into an object as wide as the field
9129 (which we know to be the width of a basic mode), then
9130 storing into memory, and changing the mode to BLKmode.
9131 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9132 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9133 if (mode1 == VOIDmode
9134 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9135 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9136 && (TYPE_ALIGN (type) > alignment
9137 || bitpos % TYPE_ALIGN (type) != 0)))
9139 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9141 if (ext_mode == BLKmode)
9143 /* In this case, BITPOS must start at a byte boundary. */
9144 if (GET_CODE (op0) != MEM
9145 || bitpos % BITS_PER_UNIT != 0)
9146 abort ();
9148 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9150 else
9152 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9153 TYPE_QUAL_CONST);
9154 rtx new = assign_temp (nt, 0, 1, 1);
9156 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9157 unsignedp, NULL_RTX, ext_mode,
9158 ext_mode, alignment,
9159 int_size_in_bytes (TREE_TYPE (tem)));
9161 /* If the result is a record type and BITSIZE is narrower than
9162 the mode of OP0, an integral mode, and this is a big endian
9163 machine, we must put the field into the high-order bits. */
9164 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9165 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9166 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9167 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9168 size_int (GET_MODE_BITSIZE
9169 (GET_MODE (op0))
9170 - bitsize),
9171 op0, 1);
9173 emit_move_insn (new, op0);
9174 op0 = copy_rtx (new);
9175 PUT_MODE (op0, BLKmode);
9178 else
9179 /* Get a reference to just this component. */
9180 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9182 set_mem_alias_set (op0, get_alias_set (exp));
9184 /* Adjust the alignment in case the bit position is not
9185 a multiple of the alignment of the inner object. */
9186 while (bitpos % alignment != 0)
9187 alignment >>= 1;
9189 if (GET_CODE (XEXP (op0, 0)) == REG)
9190 mark_reg_pointer (XEXP (op0, 0), alignment);
9192 MEM_IN_STRUCT_P (op0) = 1;
9193 MEM_VOLATILE_P (op0) |= volatilep;
9195 *palign = alignment;
9196 return op0;
9199 default:
9200 break;
9204 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
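
/* Illustrative sketch, not code from this file: the constant-index path
   above is what lets a read of a readonly array with a literal index fold
   to the element value at expand time.  The names below are hypothetical
   and exist only for exposition.  */

static const int example_tbl[3] = { 10, 20, 30 };

static int
example_constant_index_read ()
{
  /* Handled by the CONSTRUCTOR/DECL_INITIAL walk above, so the value 20
     can be produced without addressing memory.  */
  return example_tbl[1];
}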
9207 /* Return the tree node if ARG corresponds to a string constant or zero
9208 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9209 in bytes within the string that ARG is accessing. The type of the
9210 offset will be `sizetype'. */
9212 tree
9213 string_constant (arg, ptr_offset)
9214 tree arg;
9215 tree *ptr_offset;
9217 STRIP_NOPS (arg);
9219 if (TREE_CODE (arg) == ADDR_EXPR
9220 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9222 *ptr_offset = size_zero_node;
9223 return TREE_OPERAND (arg, 0);
9225 else if (TREE_CODE (arg) == PLUS_EXPR)
9227 tree arg0 = TREE_OPERAND (arg, 0);
9228 tree arg1 = TREE_OPERAND (arg, 1);
9230 STRIP_NOPS (arg0);
9231 STRIP_NOPS (arg1);
9233 if (TREE_CODE (arg0) == ADDR_EXPR
9234 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9236 *ptr_offset = convert (sizetype, arg1);
9237 return TREE_OPERAND (arg0, 0);
9239 else if (TREE_CODE (arg1) == ADDR_EXPR
9240 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9242 *ptr_offset = convert (sizetype, arg0);
9243 return TREE_OPERAND (arg1, 0);
9247 return 0;
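
/* A minimal usage sketch, not part of GCC: how a caller might use
   string_constant to reach the bytes of a string literal.  The helper name
   is hypothetical; TREE_STRING_POINTER and TREE_STRING_LENGTH are the usual
   tree.h accessors.  */
static const char *
example_string_cst_bytes (arg)
     tree arg;
{
  tree offset;
  tree str = string_constant (arg, &offset);

  /* STR is zero unless ARG is the address of (part of) a STRING_CST;
     OFFSET then has type `sizetype'.  */
  if (str == 0
      || ! host_integerp (offset, 1)
      || compare_tree_int (offset, TREE_STRING_LENGTH (str)) >= 0)
    return 0;

  return TREE_STRING_POINTER (str) + tree_low_cst (offset, 1);
}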
9250 /* Expand code for a post- or pre- increment or decrement
9251 and return the RTX for the result.
9252 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9254 static rtx
9255 expand_increment (exp, post, ignore)
9256 tree exp;
9257 int post, ignore;
9259 rtx op0, op1;
9260 rtx temp, value;
9261 tree incremented = TREE_OPERAND (exp, 0);
9262 optab this_optab = add_optab;
9263 int icode;
9264 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9265 int op0_is_copy = 0;
9266 int single_insn = 0;
9267 /* 1 means we can't store into OP0 directly,
9268 because it is a subreg narrower than a word,
9269 and we don't dare clobber the rest of the word. */
9270 int bad_subreg = 0;
9272 /* Stabilize any component ref that might need to be
9273 evaluated more than once below. */
9274 if (!post
9275 || TREE_CODE (incremented) == BIT_FIELD_REF
9276 || (TREE_CODE (incremented) == COMPONENT_REF
9277 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9278 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9279 incremented = stabilize_reference (incremented);
9280 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9281 ones into save exprs so that they don't accidentally get evaluated
9282 more than once by the code below. */
9283 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9284 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9285 incremented = save_expr (incremented);
9287 /* Compute the operands as RTX.
9288 Note whether OP0 is the actual lvalue or a copy of it:
9289 I believe it is a copy iff it is a register or subreg
9290 and insns were generated in computing it. */
9292 temp = get_last_insn ();
9293 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9295 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9296 in place but instead must do sign- or zero-extension during assignment,
9297 so we copy it into a new register and let the code below use it as
9298 a copy.
9300 Note that we can safely modify this SUBREG since it is known not to be
9301 shared (it was made by the expand_expr call above). */
9303 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9305 if (post)
9306 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9307 else
9308 bad_subreg = 1;
9310 else if (GET_CODE (op0) == SUBREG
9311 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9313 /* We cannot increment this SUBREG in place. If we are
9314 post-incrementing, get a copy of the old value. Otherwise,
9315 just mark that we cannot increment in place. */
9316 if (post)
9317 op0 = copy_to_reg (op0);
9318 else
9319 bad_subreg = 1;
9322 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9323 && temp != get_last_insn ());
9324 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9325 EXPAND_MEMORY_USE_BAD);
9327 /* Decide whether incrementing or decrementing. */
9328 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9329 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9330 this_optab = sub_optab;
9332 /* Convert decrement by a constant into a negative increment. */
9333 if (this_optab == sub_optab
9334 && GET_CODE (op1) == CONST_INT)
9336 op1 = GEN_INT (-INTVAL (op1));
9337 this_optab = add_optab;
9340 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9341 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9343 /* For a preincrement, see if we can do this with a single instruction. */
9344 if (!post)
9346 icode = (int) this_optab->handlers[(int) mode].insn_code;
9347 if (icode != (int) CODE_FOR_nothing
9348 /* Make sure that OP0 is valid for operands 0 and 1
9349 of the insn we want to queue. */
9350 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9351 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9352 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9353 single_insn = 1;
9356 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9357 then we cannot just increment OP0. We must therefore contrive to
9358 increment the original value. Then, for postincrement, we can return
9359 OP0 since it is a copy of the old value. For preincrement, expand here
9360 unless we can do it with a single insn.
9362 Likewise if storing directly into OP0 would clobber high bits
9363 we need to preserve (bad_subreg). */
9364 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9366 /* This is the easiest way to increment the value wherever it is.
9367 Problems with multiple evaluation of INCREMENTED are prevented
9368 because either (1) it is a component_ref or preincrement,
9369 in which case it was stabilized above, or (2) it is an array_ref
9370 with constant index in an array in a register, which is
9371 safe to reevaluate. */
9372 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9373 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9374 ? MINUS_EXPR : PLUS_EXPR),
9375 TREE_TYPE (exp),
9376 incremented,
9377 TREE_OPERAND (exp, 1));
9379 while (TREE_CODE (incremented) == NOP_EXPR
9380 || TREE_CODE (incremented) == CONVERT_EXPR)
9382 newexp = convert (TREE_TYPE (incremented), newexp);
9383 incremented = TREE_OPERAND (incremented, 0);
9386 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9387 return post ? op0 : temp;
9390 if (post)
9392 /* We have a true reference to the value in OP0.
9393 If there is an insn to add or subtract in this mode, queue it.
9394 Queueing the increment insn avoids the register shuffling
9395 that often results if we must increment now and first save
9396 the old value for subsequent use. */
9398 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9399 op0 = stabilize (op0);
9400 #endif
9402 icode = (int) this_optab->handlers[(int) mode].insn_code;
9403 if (icode != (int) CODE_FOR_nothing
9404 /* Make sure that OP0 is valid for operands 0 and 1
9405 of the insn we want to queue. */
9406 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9407 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9409 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9410 op1 = force_reg (mode, op1);
9412 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9414 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9416 rtx addr = (general_operand (XEXP (op0, 0), mode)
9417 ? force_reg (Pmode, XEXP (op0, 0))
9418 : copy_to_reg (XEXP (op0, 0)));
9419 rtx temp, result;
9421 op0 = replace_equiv_address (op0, addr);
9422 temp = force_reg (GET_MODE (op0), op0);
9423 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9424 op1 = force_reg (mode, op1);
9426 /* The increment queue is LIFO, thus we have to `queue'
9427 the instructions in reverse order. */
9428 enqueue_insn (op0, gen_move_insn (op0, temp));
9429 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9430 return result;
9434 /* Preincrement, or we can't increment with one simple insn. */
9435 if (post)
9436 /* Save a copy of the value before inc or dec, to return it later. */
9437 temp = value = copy_to_reg (op0);
9438 else
9439 /* Arrange to return the incremented value. */
9440 /* Copy the rtx because expand_binop will protect from the queue,
9441 and the results of that would be invalid for us to return
9442 if our caller does emit_queue before using our result. */
9443 temp = copy_rtx (value = op0);
9445 /* Increment however we can. */
9446 op1 = expand_binop (mode, this_optab, value, op1,
9447 current_function_check_memory_usage ? NULL_RTX : op0,
9448 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9449 /* Make sure the value is stored into OP0. */
9450 if (op1 != op0)
9451 emit_move_insn (op0, op1);
9453 return temp;
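
/* Sketch, not part of GCC: the fallback path above handles `++x' by
   building the ordinary assignment `x = x + 1' and expanding that.  The
   helper name is hypothetical.  */
static rtx
example_expand_preinc_fallback (var, inc)
     tree var, inc;
{
  tree newexp = build (PLUS_EXPR, TREE_TYPE (var), var, inc);

  /* Non-zero WANT_VALUE because the caller needs the incremented value;
     compare the expand_assignment call in expand_increment above.  */
  return expand_assignment (var, newexp, 1, 0);
}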
9456 /* At the start of a function, record that we have no previously-pushed
9457 arguments waiting to be popped. */
9459 void
9460 init_pending_stack_adjust ()
9462 pending_stack_adjust = 0;
9465 /* When exiting from function, if safe, clear out any pending stack adjust
9466 so the adjustment won't get done.
9468 Note, if the current function calls alloca, then it must have a
9469 frame pointer regardless of the value of flag_omit_frame_pointer. */
9471 void
9472 clear_pending_stack_adjust ()
9474 #ifdef EXIT_IGNORE_STACK
9475 if (optimize > 0
9476 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9477 && EXIT_IGNORE_STACK
9478 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9479 && ! flag_inline_functions)
9481 stack_pointer_delta -= pending_stack_adjust,
9482 pending_stack_adjust = 0;
9484 #endif
9487 /* Pop any previously-pushed arguments that have not been popped yet. */
9489 void
9490 do_pending_stack_adjust ()
9492 if (inhibit_defer_pop == 0)
9494 if (pending_stack_adjust != 0)
9495 adjust_stack (GEN_INT (pending_stack_adjust));
9496 pending_stack_adjust = 0;
9500 /* Expand conditional expressions. */
9502 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9503 LABEL is an rtx of code CODE_LABEL, in this function and all the
9504 functions here. */
9506 void
9507 jumpifnot (exp, label)
9508 tree exp;
9509 rtx label;
9511 do_jump (exp, label, NULL_RTX);
9514 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9516 void
9517 jumpif (exp, label)
9518 tree exp;
9519 rtx label;
9521 do_jump (exp, NULL_RTX, label);
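
/* Sketch, not part of GCC: the typical way a statement expander uses the
   helpers above to expand `if (COND) ...'.  The function name is
   hypothetical; the real callers live elsewhere.  */
static void
example_expand_if_then (cond)
     tree cond;
{
  rtx after_label = gen_label_rtx ();

  /* Branch around the THEN-part when COND is zero.  */
  jumpifnot (cond, after_label);

  /* ... THEN-part RTL would be emitted here ... */

  /* Flush deferred stack pops before the merge point, then place the
     label (compare the drop_through_label handling in do_jump below).  */
  do_pending_stack_adjust ();
  emit_label (after_label);
}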
9524 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9525 the result is zero, or IF_TRUE_LABEL if the result is one.
9526 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9527 meaning fall through in that case.
9529 do_jump always does any pending stack adjust except when it does not
9530 actually perform a jump. An example where there is no jump
9531 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9533 This function is responsible for optimizing cases such as
9534 &&, || and comparison operators in EXP. */
9536 void
9537 do_jump (exp, if_false_label, if_true_label)
9538 tree exp;
9539 rtx if_false_label, if_true_label;
9541 enum tree_code code = TREE_CODE (exp);
9542 /* Some cases need to create a label to jump to
9543 in order to properly fall through.
9544 These cases set DROP_THROUGH_LABEL nonzero. */
9545 rtx drop_through_label = 0;
9546 rtx temp;
9547 int i;
9548 tree type;
9549 enum machine_mode mode;
9551 #ifdef MAX_INTEGER_COMPUTATION_MODE
9552 check_max_integer_computation_mode (exp);
9553 #endif
9555 emit_queue ();
9557 switch (code)
9559 case ERROR_MARK:
9560 break;
9562 case INTEGER_CST:
9563 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9564 if (temp)
9565 emit_jump (temp);
9566 break;
9568 #if 0
9569 /* This is not true with #pragma weak */
9570 case ADDR_EXPR:
9571 /* The address of something can never be zero. */
9572 if (if_true_label)
9573 emit_jump (if_true_label);
9574 break;
9575 #endif
9577 case NOP_EXPR:
9578 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9579 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9580 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9581 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9582 goto normal;
9583 case CONVERT_EXPR:
9584 /* If we are narrowing the operand, we have to do the compare in the
9585 narrower mode. */
9586 if ((TYPE_PRECISION (TREE_TYPE (exp))
9587 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9588 goto normal;
9589 case NON_LVALUE_EXPR:
9590 case REFERENCE_EXPR:
9591 case ABS_EXPR:
9592 case NEGATE_EXPR:
9593 case LROTATE_EXPR:
9594 case RROTATE_EXPR:
9595 /* These cannot change zero->non-zero or vice versa. */
9596 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9597 break;
9599 case WITH_RECORD_EXPR:
9600 /* Put the object on the placeholder list, recurse through our first
9601 operand, and pop the list. */
9602 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9603 placeholder_list);
9604 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9605 placeholder_list = TREE_CHAIN (placeholder_list);
9606 break;
9608 #if 0
9609 /* This is never less insns than evaluating the PLUS_EXPR followed by
9610 a test and can be longer if the test is eliminated. */
9611 case PLUS_EXPR:
9612 /* Reduce to minus. */
9613 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9614 TREE_OPERAND (exp, 0),
9615 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9616 TREE_OPERAND (exp, 1))));
9617 /* Process as MINUS. */
9618 #endif
9620 case MINUS_EXPR:
9621 /* Non-zero iff operands of minus differ. */
9622 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9623 TREE_OPERAND (exp, 0),
9624 TREE_OPERAND (exp, 1)),
9625 NE, NE, if_false_label, if_true_label);
9626 break;
9628 case BIT_AND_EXPR:
9629 /* If we are AND'ing with a small constant, do this comparison in the
9630 smallest type that fits. If the machine doesn't have comparisons
9631 that small, it will be converted back to the wider comparison.
9632 This helps if we are testing the sign bit of a narrower object.
9633 combine can't do this for us because it can't know whether a
9634 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9636 if (! SLOW_BYTE_ACCESS
9637 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9638 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9639 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9640 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9641 && (type = type_for_mode (mode, 1)) != 0
9642 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9643 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9644 != CODE_FOR_nothing))
9646 do_jump (convert (type, exp), if_false_label, if_true_label);
9647 break;
9649 goto normal;
9651 case TRUTH_NOT_EXPR:
9652 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9653 break;
9655 case TRUTH_ANDIF_EXPR:
9656 if (if_false_label == 0)
9657 if_false_label = drop_through_label = gen_label_rtx ();
9658 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9659 start_cleanup_deferral ();
9660 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9661 end_cleanup_deferral ();
9662 break;
9664 case TRUTH_ORIF_EXPR:
9665 if (if_true_label == 0)
9666 if_true_label = drop_through_label = gen_label_rtx ();
9667 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9668 start_cleanup_deferral ();
9669 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9670 end_cleanup_deferral ();
9671 break;
9673 case COMPOUND_EXPR:
9674 push_temp_slots ();
9675 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9676 preserve_temp_slots (NULL_RTX);
9677 free_temp_slots ();
9678 pop_temp_slots ();
9679 emit_queue ();
9680 do_pending_stack_adjust ();
9681 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9682 break;
9684 case COMPONENT_REF:
9685 case BIT_FIELD_REF:
9686 case ARRAY_REF:
9687 case ARRAY_RANGE_REF:
9689 HOST_WIDE_INT bitsize, bitpos;
9690 int unsignedp;
9691 enum machine_mode mode;
9692 tree type;
9693 tree offset;
9694 int volatilep = 0;
9695 unsigned int alignment;
9697 /* Get description of this reference. We don't actually care
9698 about the underlying object here. */
9699 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9700 &unsignedp, &volatilep, &alignment);
9702 type = type_for_size (bitsize, unsignedp);
9703 if (! SLOW_BYTE_ACCESS
9704 && type != 0 && bitsize >= 0
9705 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9706 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9707 != CODE_FOR_nothing))
9709 do_jump (convert (type, exp), if_false_label, if_true_label);
9710 break;
9712 goto normal;
9715 case COND_EXPR:
9716 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9717 if (integer_onep (TREE_OPERAND (exp, 1))
9718 && integer_zerop (TREE_OPERAND (exp, 2)))
9719 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9721 else if (integer_zerop (TREE_OPERAND (exp, 1))
9722 && integer_onep (TREE_OPERAND (exp, 2)))
9723 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9725 else
9727 rtx label1 = gen_label_rtx ();
9728 drop_through_label = gen_label_rtx ();
9730 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9732 start_cleanup_deferral ();
9733 /* Now the THEN-expression. */
9734 do_jump (TREE_OPERAND (exp, 1),
9735 if_false_label ? if_false_label : drop_through_label,
9736 if_true_label ? if_true_label : drop_through_label);
9737 /* In case the do_jump just above never jumps. */
9738 do_pending_stack_adjust ();
9739 emit_label (label1);
9741 /* Now the ELSE-expression. */
9742 do_jump (TREE_OPERAND (exp, 2),
9743 if_false_label ? if_false_label : drop_through_label,
9744 if_true_label ? if_true_label : drop_through_label);
9745 end_cleanup_deferral ();
9747 break;
9749 case EQ_EXPR:
9751 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9753 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9754 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9756 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9757 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9758 do_jump
9759 (fold
9760 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9761 fold (build (EQ_EXPR, TREE_TYPE (exp),
9762 fold (build1 (REALPART_EXPR,
9763 TREE_TYPE (inner_type),
9764 exp0)),
9765 fold (build1 (REALPART_EXPR,
9766 TREE_TYPE (inner_type),
9767 exp1)))),
9768 fold (build (EQ_EXPR, TREE_TYPE (exp),
9769 fold (build1 (IMAGPART_EXPR,
9770 TREE_TYPE (inner_type),
9771 exp0)),
9772 fold (build1 (IMAGPART_EXPR,
9773 TREE_TYPE (inner_type),
9774 exp1)))))),
9775 if_false_label, if_true_label);
9778 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9779 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9781 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9782 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9783 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9784 else
9785 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9786 break;
9789 case NE_EXPR:
9791 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9793 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9794 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9796 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9797 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9798 do_jump
9799 (fold
9800 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9801 fold (build (NE_EXPR, TREE_TYPE (exp),
9802 fold (build1 (REALPART_EXPR,
9803 TREE_TYPE (inner_type),
9804 exp0)),
9805 fold (build1 (REALPART_EXPR,
9806 TREE_TYPE (inner_type),
9807 exp1)))),
9808 fold (build (NE_EXPR, TREE_TYPE (exp),
9809 fold (build1 (IMAGPART_EXPR,
9810 TREE_TYPE (inner_type),
9811 exp0)),
9812 fold (build1 (IMAGPART_EXPR,
9813 TREE_TYPE (inner_type),
9814 exp1)))))),
9815 if_false_label, if_true_label);
9818 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9819 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9821 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9822 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9823 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9824 else
9825 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9826 break;
9829 case LT_EXPR:
9830 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9831 if (GET_MODE_CLASS (mode) == MODE_INT
9832 && ! can_compare_p (LT, mode, ccp_jump))
9833 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9834 else
9835 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9836 break;
9838 case LE_EXPR:
9839 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9840 if (GET_MODE_CLASS (mode) == MODE_INT
9841 && ! can_compare_p (LE, mode, ccp_jump))
9842 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9843 else
9844 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9845 break;
9847 case GT_EXPR:
9848 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9849 if (GET_MODE_CLASS (mode) == MODE_INT
9850 && ! can_compare_p (GT, mode, ccp_jump))
9851 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9852 else
9853 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9854 break;
9856 case GE_EXPR:
9857 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9858 if (GET_MODE_CLASS (mode) == MODE_INT
9859 && ! can_compare_p (GE, mode, ccp_jump))
9860 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9861 else
9862 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9863 break;
9865 case UNORDERED_EXPR:
9866 case ORDERED_EXPR:
9868 enum rtx_code cmp, rcmp;
9869 int do_rev;
9871 if (code == UNORDERED_EXPR)
9872 cmp = UNORDERED, rcmp = ORDERED;
9873 else
9874 cmp = ORDERED, rcmp = UNORDERED;
9875 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9877 do_rev = 0;
9878 if (! can_compare_p (cmp, mode, ccp_jump)
9879 && (can_compare_p (rcmp, mode, ccp_jump)
9880 /* If the target doesn't provide either UNORDERED or ORDERED
9881 comparisons, canonicalize on UNORDERED for the library. */
9882 || rcmp == UNORDERED))
9883 do_rev = 1;
9885 if (! do_rev)
9886 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9887 else
9888 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9890 break;
9893 enum rtx_code rcode1;
9894 enum tree_code tcode2;
9896 case UNLT_EXPR:
9897 rcode1 = UNLT;
9898 tcode2 = LT_EXPR;
9899 goto unordered_bcc;
9900 case UNLE_EXPR:
9901 rcode1 = UNLE;
9902 tcode2 = LE_EXPR;
9903 goto unordered_bcc;
9904 case UNGT_EXPR:
9905 rcode1 = UNGT;
9906 tcode2 = GT_EXPR;
9907 goto unordered_bcc;
9908 case UNGE_EXPR:
9909 rcode1 = UNGE;
9910 tcode2 = GE_EXPR;
9911 goto unordered_bcc;
9912 case UNEQ_EXPR:
9913 rcode1 = UNEQ;
9914 tcode2 = EQ_EXPR;
9915 goto unordered_bcc;
9917 unordered_bcc:
9918 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9919 if (can_compare_p (rcode1, mode, ccp_jump))
9920 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9921 if_true_label);
9922 else
9924 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9925 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9926 tree cmp0, cmp1;
9928 /* If the target doesn't support combined unordered
9929 compares, decompose into UNORDERED + comparison. */
9930 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9931 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9932 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9933 do_jump (exp, if_false_label, if_true_label);
9936 break;
9938 /* Special case:
9939 __builtin_expect (<test>, 0) and
9940 __builtin_expect (<test>, 1)
9942 We need to do this here, so that <test> is not converted to a SCC
9943 operation on machines that use condition code registers and COMPARE
9944 like the PowerPC, and then the jump is done based on whether the SCC
9945 operation produced a 1 or 0. */
9946 case CALL_EXPR:
9947 /* Check for a built-in function. */
9948 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9950 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9951 tree arglist = TREE_OPERAND (exp, 1);
9953 if (TREE_CODE (fndecl) == FUNCTION_DECL
9954 && DECL_BUILT_IN (fndecl)
9955 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9956 && arglist != NULL_TREE
9957 && TREE_CHAIN (arglist) != NULL_TREE)
9959 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9960 if_true_label);
9962 if (seq != NULL_RTX)
9964 emit_insn (seq);
9965 return;
9969 /* fall through and generate the normal code. */
9971 default:
9972 normal:
9973 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9974 #if 0
9975 /* This is not needed any more and causes poor code since it causes
9976 comparisons and tests from non-SI objects to have different code
9977 sequences. */
9978 /* Copy to register to avoid generating bad insns by cse
9979 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9980 if (!cse_not_expected && GET_CODE (temp) == MEM)
9981 temp = copy_to_reg (temp);
9982 #endif
9983 do_pending_stack_adjust ();
9984 /* Do any postincrements in the expression that was tested. */
9985 emit_queue ();
9987 if (GET_CODE (temp) == CONST_INT
9988 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9989 || GET_CODE (temp) == LABEL_REF)
9991 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9992 if (target)
9993 emit_jump (target);
9995 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9996 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9997 /* Note swapping the labels gives us not-equal. */
9998 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9999 else if (GET_MODE (temp) != VOIDmode)
10000 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10001 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10002 GET_MODE (temp), NULL_RTX, 0,
10003 if_false_label, if_true_label);
10004 else
10005 abort ();
10008 if (drop_through_label)
10010 /* If do_jump produces code that might be jumped around,
10011 do any stack adjusts from that code, before the place
10012 where control merges in. */
10013 do_pending_stack_adjust ();
10014 emit_label (drop_through_label);
10018 /* Given a comparison expression EXP for values too wide to be compared
10019 with one insn, test the comparison and jump to the appropriate label.
10020 The code of EXP is ignored; we always test GT if SWAP is 0,
10021 and LT if SWAP is 1. */
10023 static void
10024 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10025 tree exp;
10026 int swap;
10027 rtx if_false_label, if_true_label;
10029 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10030 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10031 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10032 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10034 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10037 /* Compare OP0 with OP1, word at a time, in mode MODE.
10038 UNSIGNEDP says to do unsigned comparison.
10039 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10041 void
10042 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10043 enum machine_mode mode;
10044 int unsignedp;
10045 rtx op0, op1;
10046 rtx if_false_label, if_true_label;
10048 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10049 rtx drop_through_label = 0;
10050 int i;
10052 if (! if_true_label || ! if_false_label)
10053 drop_through_label = gen_label_rtx ();
10054 if (! if_true_label)
10055 if_true_label = drop_through_label;
10056 if (! if_false_label)
10057 if_false_label = drop_through_label;
10059 /* Compare a word at a time, high order first. */
10060 for (i = 0; i < nwords; i++)
10062 rtx op0_word, op1_word;
10064 if (WORDS_BIG_ENDIAN)
10066 op0_word = operand_subword_force (op0, i, mode);
10067 op1_word = operand_subword_force (op1, i, mode);
10069 else
10071 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10072 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10075 /* All but high-order word must be compared as unsigned. */
10076 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10077 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10078 NULL_RTX, if_true_label);
10080 /* Consider lower words only if these are equal. */
10081 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10082 NULL_RTX, 0, NULL_RTX, if_false_label);
10085 if (if_false_label)
10086 emit_jump (if_false_label);
10087 if (drop_through_label)
10088 emit_label (drop_through_label);
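
/* Sketch, not part of GCC: how a caller reaches the word-by-word
   comparison above when the target cannot compare a DImode pair directly.
   The wrapper name is hypothetical.  */
static void
example_jump_if_greater_di (op0, op1, win_label)
     rtx op0, op1, win_label;
{
  /* Unsigned comparison, high-order word first; fall through when OP0 is
     not greater than OP1.  */
  do_jump_by_parts_greater_rtx (DImode, 1, op0, op1, NULL_RTX, win_label);
}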
10091 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10092 with one insn, test the comparison and jump to the appropriate label. */
10094 static void
10095 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10096 tree exp;
10097 rtx if_false_label, if_true_label;
10099 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10100 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10101 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10102 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10103 int i;
10104 rtx drop_through_label = 0;
10106 if (! if_false_label)
10107 drop_through_label = if_false_label = gen_label_rtx ();
10109 for (i = 0; i < nwords; i++)
10110 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10111 operand_subword_force (op1, i, mode),
10112 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10113 word_mode, NULL_RTX, 0, if_false_label,
10114 NULL_RTX);
10116 if (if_true_label)
10117 emit_jump (if_true_label);
10118 if (drop_through_label)
10119 emit_label (drop_through_label);
10122 /* Jump according to whether OP0 is 0.
10123 We assume that OP0 has an integer mode that is too wide
10124 for the available compare insns. */
10126 void
10127 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10128 rtx op0;
10129 rtx if_false_label, if_true_label;
10131 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10132 rtx part;
10133 int i;
10134 rtx drop_through_label = 0;
10136 /* The fastest way of doing this comparison on almost any machine is to
10137 "or" all the words and compare the result. If all have to be loaded
10138 from memory and this is a very wide item, it's possible this may
10139 be slower, but that's highly unlikely. */
10141 part = gen_reg_rtx (word_mode);
10142 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10143 for (i = 1; i < nwords && part != 0; i++)
10144 part = expand_binop (word_mode, ior_optab, part,
10145 operand_subword_force (op0, i, GET_MODE (op0)),
10146 part, 1, OPTAB_WIDEN);
10148 if (part != 0)
10150 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10151 NULL_RTX, 0, if_false_label, if_true_label);
10153 return;
10156 /* If we couldn't do the "or" simply, do this with a series of compares. */
10157 if (! if_false_label)
10158 drop_through_label = if_false_label = gen_label_rtx ();
10160 for (i = 0; i < nwords; i++)
10161 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10162 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10163 if_false_label, NULL_RTX);
10165 if (if_true_label)
10166 emit_jump (if_true_label);
10168 if (drop_through_label)
10169 emit_label (drop_through_label);
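
/* Plain-C analogue, for exposition only (not GCC code), of the strategy
   above: test a multiword value for zero by OR-ing all of its words and
   comparing the accumulated result once.  */
static int
example_multiword_is_zero (words, nwords)
     unsigned long *words;
     int nwords;
{
  unsigned long acc = words[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= words[i];

  return acc == 0;
}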
10172 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10173 (including code to compute the values to be compared),
10174 and set (CC0) according to the result.
10175 The decision as to signed or unsigned comparison must be made by the caller.
10177 We force a stack adjustment unless there are currently
10178 things pushed on the stack that aren't yet used.
10180 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10181 compared.
10183 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10184 size of MODE should be used. */
10186 rtx
10187 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10188 rtx op0, op1;
10189 enum rtx_code code;
10190 int unsignedp;
10191 enum machine_mode mode;
10192 rtx size;
10193 unsigned int align;
10195 rtx tem;
10197 /* If one operand is constant, make it the second one. Only do this
10198 if the other operand is not constant as well. */
10200 if (swap_commutative_operands_p (op0, op1))
10202 tem = op0;
10203 op0 = op1;
10204 op1 = tem;
10205 code = swap_condition (code);
10208 if (flag_force_mem)
10210 op0 = force_not_mem (op0);
10211 op1 = force_not_mem (op1);
10214 do_pending_stack_adjust ();
10216 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10217 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10218 return tem;
10220 #if 0
10221 /* There's no need to do this now that combine.c can eliminate lots of
10222 sign extensions. This can be less efficient in certain cases on other
10223 machines. */
10225 /* If this is a signed equality comparison, we can do it as an
10226 unsigned comparison since zero-extension is cheaper than sign
10227 extension and comparisons with zero are done as unsigned. This is
10228 the case even on machines that can do fast sign extension, since
10229 zero-extension is easier to combine with other operations than
10230 sign-extension is. If we are comparing against a constant, we must
10231 convert it to what it would look like unsigned. */
10232 if ((code == EQ || code == NE) && ! unsignedp
10233 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10235 if (GET_CODE (op1) == CONST_INT
10236 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10237 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10238 unsignedp = 1;
10240 #endif
10242 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10244 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10247 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10248 The decision as to signed or unsigned comparison must be made by the caller.
10250 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10251 compared.
10253 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10254 size of MODE should be used. */
10256 void
10257 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10258 if_false_label, if_true_label)
10259 rtx op0, op1;
10260 enum rtx_code code;
10261 int unsignedp;
10262 enum machine_mode mode;
10263 rtx size;
10264 unsigned int align;
10265 rtx if_false_label, if_true_label;
10267 rtx tem;
10268 int dummy_true_label = 0;
10270 /* Reverse the comparison if that is safe and we want to jump if it is
10271 false. */
10272 if (! if_true_label && ! FLOAT_MODE_P (mode))
10274 if_true_label = if_false_label;
10275 if_false_label = 0;
10276 code = reverse_condition (code);
10279 /* If one operand is constant, make it the second one. Only do this
10280 if the other operand is not constant as well. */
10282 if (swap_commutative_operands_p (op0, op1))
10284 tem = op0;
10285 op0 = op1;
10286 op1 = tem;
10287 code = swap_condition (code);
10290 if (flag_force_mem)
10292 op0 = force_not_mem (op0);
10293 op1 = force_not_mem (op1);
10296 do_pending_stack_adjust ();
10298 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10299 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10301 if (tem == const_true_rtx)
10303 if (if_true_label)
10304 emit_jump (if_true_label);
10306 else
10308 if (if_false_label)
10309 emit_jump (if_false_label);
10311 return;
10314 #if 0
10315 /* There's no need to do this now that combine.c can eliminate lots of
10316 sign extensions. This can be less efficient in certain cases on other
10317 machines. */
10319 /* If this is a signed equality comparison, we can do it as an
10320 unsigned comparison since zero-extension is cheaper than sign
10321 extension and comparisons with zero are done as unsigned. This is
10322 the case even on machines that can do fast sign extension, since
10323 zero-extension is easier to combine with other operations than
10324 sign-extension is. If we are comparing against a constant, we must
10325 convert it to what it would look like unsigned. */
10326 if ((code == EQ || code == NE) && ! unsignedp
10327 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10329 if (GET_CODE (op1) == CONST_INT
10330 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10331 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10332 unsignedp = 1;
10334 #endif
10336 if (! if_true_label)
10338 dummy_true_label = 1;
10339 if_true_label = gen_label_rtx ();
10342 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10343 if_true_label);
10345 if (if_false_label)
10346 emit_jump (if_false_label);
10347 if (dummy_true_label)
10348 emit_label (if_true_label);
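
/* Sketch, not part of GCC: emitting `if (REG != 0) goto LABEL' for an
   SImode register with the helper above.  The wrapper is hypothetical.  */
static void
example_branch_if_nonzero (reg, label)
     rtx reg, label;
{
  /* SIZE is NULL_RTX and ALIGN is 0 because SImode is not BLKmode;
     a null IF_FALSE_LABEL means fall through when REG is zero.  */
  do_compare_rtx_and_jump (reg, const0_rtx, NE, 1, SImode, NULL_RTX, 0,
                           NULL_RTX, label);
}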
10351 /* Generate code for a comparison expression EXP (including code to compute
10352 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10353 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10354 generated code will drop through.
10355 SIGNED_CODE should be the rtx operation for this comparison for
10356 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10358 We force a stack adjustment unless there are currently
10359 things pushed on the stack that aren't yet used. */
10361 static void
10362 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10363 if_true_label)
10364 tree exp;
10365 enum rtx_code signed_code, unsigned_code;
10366 rtx if_false_label, if_true_label;
10368 unsigned int align0, align1;
10369 rtx op0, op1;
10370 tree type;
10371 enum machine_mode mode;
10372 int unsignedp;
10373 enum rtx_code code;
10375 /* Don't crash if the comparison was erroneous. */
10376 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10377 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10378 return;
10380 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10381 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10382 return;
10384 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10385 mode = TYPE_MODE (type);
10386 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10387 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10388 || (GET_MODE_BITSIZE (mode)
10389 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10390 1)))))))
10392 /* op0 might have been replaced by promoted constant, in which
10393 case the type of second argument should be used. */
10394 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10395 mode = TYPE_MODE (type);
10397 unsignedp = TREE_UNSIGNED (type);
10398 code = unsignedp ? unsigned_code : signed_code;
10400 #ifdef HAVE_canonicalize_funcptr_for_compare
10401 /* If function pointers need to be "canonicalized" before they can
10402 be reliably compared, then canonicalize them. */
10403 if (HAVE_canonicalize_funcptr_for_compare
10404 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10405 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10406 == FUNCTION_TYPE))
10408 rtx new_op0 = gen_reg_rtx (mode);
10410 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10411 op0 = new_op0;
10414 if (HAVE_canonicalize_funcptr_for_compare
10415 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10416 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10417 == FUNCTION_TYPE))
10419 rtx new_op1 = gen_reg_rtx (mode);
10421 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10422 op1 = new_op1;
10424 #endif
10426 /* Do any postincrements in the expression that was tested. */
10427 emit_queue ();
10429 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10430 ((mode == BLKmode)
10431 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10432 MIN (align0, align1),
10433 if_false_label, if_true_label);
10436 /* Generate code to calculate EXP using a store-flag instruction
10437 and return an rtx for the result. EXP is either a comparison
10438 or a TRUTH_NOT_EXPR whose operand is a comparison.
10440 If TARGET is nonzero, store the result there if convenient.
10442 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10443 cheap.
10445 Return zero if there is no suitable set-flag instruction
10446 available on this machine.
10448 Once expand_expr has been called on the arguments of the comparison,
10449 we are committed to doing the store flag, since it is not safe to
10450 re-evaluate the expression. We emit the store-flag insn by calling
10451 emit_store_flag, but only expand the arguments if we have a reason
10452 to believe that emit_store_flag will be successful. If we think that
10453 it will, but it isn't, we have to simulate the store-flag with a
10454 set/jump/set sequence. */
10456 static rtx
10457 do_store_flag (exp, target, mode, only_cheap)
10458 tree exp;
10459 rtx target;
10460 enum machine_mode mode;
10461 int only_cheap;
10463 enum rtx_code code;
10464 tree arg0, arg1, type;
10465 tree tem;
10466 enum machine_mode operand_mode;
10467 int invert = 0;
10468 int unsignedp;
10469 rtx op0, op1;
10470 enum insn_code icode;
10471 rtx subtarget = target;
10472 rtx result, label;
10474 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10475 result at the end. We can't simply invert the test since it would
10476 have already been inverted if it were valid. This case occurs for
10477 some floating-point comparisons. */
10479 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10480 invert = 1, exp = TREE_OPERAND (exp, 0);
10482 arg0 = TREE_OPERAND (exp, 0);
10483 arg1 = TREE_OPERAND (exp, 1);
10485 /* Don't crash if the comparison was erroneous. */
10486 if (arg0 == error_mark_node || arg1 == error_mark_node)
10487 return const0_rtx;
10489 type = TREE_TYPE (arg0);
10490 operand_mode = TYPE_MODE (type);
10491 unsignedp = TREE_UNSIGNED (type);
10493 /* We won't bother with BLKmode store-flag operations because it would mean
10494 passing a lot of information to emit_store_flag. */
10495 if (operand_mode == BLKmode)
10496 return 0;
10498 /* We won't bother with store-flag operations involving function pointers
10499 when function pointers must be canonicalized before comparisons. */
10500 #ifdef HAVE_canonicalize_funcptr_for_compare
10501 if (HAVE_canonicalize_funcptr_for_compare
10502 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10503 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10504 == FUNCTION_TYPE))
10505 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10506 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10507 == FUNCTION_TYPE))))
10508 return 0;
10509 #endif
10511 STRIP_NOPS (arg0);
10512 STRIP_NOPS (arg1);
10514 /* Get the rtx comparison code to use. We know that EXP is a comparison
10515 operation of some type. Some comparisons against 1 and -1 can be
10516 converted to comparisons with zero. Do so here so that the tests
10517 below will be aware that we have a comparison with zero. These
10518 tests will not catch constants in the first operand, but constants
10519 are rarely passed as the first operand. */
10521 switch (TREE_CODE (exp))
10523 case EQ_EXPR:
10524 code = EQ;
10525 break;
10526 case NE_EXPR:
10527 code = NE;
10528 break;
10529 case LT_EXPR:
10530 if (integer_onep (arg1))
10531 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10532 else
10533 code = unsignedp ? LTU : LT;
10534 break;
10535 case LE_EXPR:
10536 if (! unsignedp && integer_all_onesp (arg1))
10537 arg1 = integer_zero_node, code = LT;
10538 else
10539 code = unsignedp ? LEU : LE;
10540 break;
10541 case GT_EXPR:
10542 if (! unsignedp && integer_all_onesp (arg1))
10543 arg1 = integer_zero_node, code = GE;
10544 else
10545 code = unsignedp ? GTU : GT;
10546 break;
10547 case GE_EXPR:
10548 if (integer_onep (arg1))
10549 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10550 else
10551 code = unsignedp ? GEU : GE;
10552 break;
10554 case UNORDERED_EXPR:
10555 code = UNORDERED;
10556 break;
10557 case ORDERED_EXPR:
10558 code = ORDERED;
10559 break;
10560 case UNLT_EXPR:
10561 code = UNLT;
10562 break;
10563 case UNLE_EXPR:
10564 code = UNLE;
10565 break;
10566 case UNGT_EXPR:
10567 code = UNGT;
10568 break;
10569 case UNGE_EXPR:
10570 code = UNGE;
10571 break;
10572 case UNEQ_EXPR:
10573 code = UNEQ;
10574 break;
10576 default:
10577 abort ();
10580 /* Put a constant second. */
10581 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10583 tem = arg0; arg0 = arg1; arg1 = tem;
10584 code = swap_condition (code);
10587 /* If this is an equality or inequality test of a single bit, we can
10588 do this by shifting the bit being tested to the low-order bit and
10589 masking the result with the constant 1. If the condition was EQ,
10590 we xor it with 1. This does not require an scc insn and is faster
10591 than an scc insn even if we have it. */
10593 if ((code == NE || code == EQ)
10594 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10595 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10597 tree inner = TREE_OPERAND (arg0, 0);
10598 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10599 int ops_unsignedp;
10601 /* If INNER is a right shift by a constant and that shift count plus
10602 BITNUM does not overflow, adjust BITNUM and INNER. */
10604 if (TREE_CODE (inner) == RSHIFT_EXPR
10605 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10606 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10607 && bitnum < TYPE_PRECISION (type)
10608 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10609 bitnum - TYPE_PRECISION (type)))
10611 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10612 inner = TREE_OPERAND (inner, 0);
10615 /* If we are going to be able to omit the AND below, we must do our
10616 operations as unsigned. If we must use the AND, we have a choice.
10617 Normally unsigned is faster, but for some machines signed is. */
10618 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10619 #ifdef LOAD_EXTEND_OP
10620 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10621 #else
10622 : 1
10623 #endif
10624 );
10626 if (! get_subtarget (subtarget)
10627 || GET_MODE (subtarget) != operand_mode
10628 || ! safe_from_p (subtarget, inner, 1))
10629 subtarget = 0;
10631 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10633 if (bitnum != 0)
10634 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10635 size_int (bitnum), subtarget, ops_unsignedp);
10637 if (GET_MODE (op0) != mode)
10638 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10640 if ((code == EQ && ! invert) || (code == NE && invert))
10641 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10642 ops_unsignedp, OPTAB_LIB_WIDEN);
10644 /* Put the AND last so it can combine with more things. */
10645 if (bitnum != TYPE_PRECISION (type) - 1)
10646 op0 = expand_and (op0, const1_rtx, subtarget);
10648 return op0;
10651 /* Now see if we are likely to be able to do this. Return if not. */
10652 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10653 return 0;
10655 icode = setcc_gen_code[(int) code];
10656 if (icode == CODE_FOR_nothing
10657 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10659 /* We can only do this if it is one of the special cases that
10660 can be handled without an scc insn. */
10661 if ((code == LT && integer_zerop (arg1))
10662 || (! only_cheap && code == GE && integer_zerop (arg1)))
10664 else if (BRANCH_COST >= 0
10665 && ! only_cheap && (code == NE || code == EQ)
10666 && TREE_CODE (type) != REAL_TYPE
10667 && ((abs_optab->handlers[(int) operand_mode].insn_code
10668 != CODE_FOR_nothing)
10669 || (ffs_optab->handlers[(int) operand_mode].insn_code
10670 != CODE_FOR_nothing)))
10672 else
10673 return 0;
10676 if (! get_subtarget (target)
10677 || GET_MODE (subtarget) != operand_mode
10678 || ! safe_from_p (subtarget, arg1, 1))
10679 subtarget = 0;
10681 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10682 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10684 if (target == 0)
10685 target = gen_reg_rtx (mode);
10687 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10688 because, if emit_store_flag does anything, it will succeed and
10689 OP0 and OP1 will not be used subsequently. */
10691 result = emit_store_flag (target, code,
10692 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10693 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10694 operand_mode, unsignedp, 1);
10696 if (result)
10698 if (invert)
10699 result = expand_binop (mode, xor_optab, result, const1_rtx,
10700 result, 0, OPTAB_LIB_WIDEN);
10701 return result;
10704 /* If this failed, we have to do this with set/compare/jump/set code. */
10705 if (GET_CODE (target) != REG
10706 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10707 target = gen_reg_rtx (GET_MODE (target));
10709 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10710 result = compare_from_rtx (op0, op1, code, unsignedp,
10711 operand_mode, NULL_RTX, 0);
10712 if (GET_CODE (result) == CONST_INT)
10713 return (((result == const0_rtx && ! invert)
10714 || (result != const0_rtx && invert))
10715 ? const0_rtx : const1_rtx);
10717 label = gen_label_rtx ();
10718 if (bcc_gen_fctn[(int) code] == 0)
10719 abort ();
10721 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10722 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10723 emit_label (label);
10725 return target;
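
/* Plain-C analogue, for exposition only (not GCC code), of the single-bit
   special case above: `(x & (1 << n)) != 0' is computed as `(x >> n) & 1',
   which needs no store-flag (scc) instruction.  */
static int
example_single_bit_test (x, n)
     unsigned int x;
     int n;
{
  return (int) ((x >> n) & 1);
}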
10729 /* Stubs in case we haven't got a casesi insn. */
10730 #ifndef HAVE_casesi
10731 # define HAVE_casesi 0
10732 # define gen_casesi(a, b, c, d, e) (0)
10733 # define CODE_FOR_casesi CODE_FOR_nothing
10734 #endif
10736 /* If the machine does not have a case insn that compares the bounds,
10737 this means extra overhead for dispatch tables, which raises the
10738 threshold for using them. */
10739 #ifndef CASE_VALUES_THRESHOLD
10740 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10741 #endif /* CASE_VALUES_THRESHOLD */
10743 unsigned int
10744 case_values_threshold ()
10746 return CASE_VALUES_THRESHOLD;
10749 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10750 0 otherwise (i.e. if there is no casesi instruction). */
10751 int
10752 try_casesi (index_type, index_expr, minval, range,
10753 table_label, default_label)
10754 tree index_type, index_expr, minval, range;
10755 rtx table_label ATTRIBUTE_UNUSED;
10756 rtx default_label;
10758 enum machine_mode index_mode = SImode;
10759 int index_bits = GET_MODE_BITSIZE (index_mode);
10760 rtx op1, op2, index;
10761 enum machine_mode op_mode;
10763 if (! HAVE_casesi)
10764 return 0;
10766 /* Convert the index to SImode. */
10767 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10769 enum machine_mode omode = TYPE_MODE (index_type);
10770 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10772 /* We must handle the endpoints in the original mode. */
10773 index_expr = build (MINUS_EXPR, index_type,
10774 index_expr, minval);
10775 minval = integer_zero_node;
10776 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10777 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10778 omode, 1, 0, default_label);
10779 /* Now we can safely truncate. */
10780 index = convert_to_mode (index_mode, index, 0);
10782 else
10784 if (TYPE_MODE (index_type) != index_mode)
10786 index_expr = convert (type_for_size (index_bits, 0),
10787 index_expr);
10788 index_type = TREE_TYPE (index_expr);
10791 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10793 emit_queue ();
10794 index = protect_from_queue (index, 0);
10795 do_pending_stack_adjust ();
10797 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10798 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10799 (index, op_mode))
10800 index = copy_to_mode_reg (op_mode, index);
10802 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10804 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10805 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10806 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10807 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10808 (op1, op_mode))
10809 op1 = copy_to_mode_reg (op_mode, op1);
10811 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10813 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10814 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10815 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10816 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10817 (op2, op_mode))
10818 op2 = copy_to_mode_reg (op_mode, op2);
10820 emit_jump_insn (gen_casesi (index, op1, op2,
10821 table_label, default_label));
10822 return 1;
10823 }
10825 /* Attempt to generate a tablejump instruction; same concept. */
10826 #ifndef HAVE_tablejump
10827 #define HAVE_tablejump 0
10828 #define gen_tablejump(x, y) (0)
10829 #endif
10831 /* Subroutine of the next function.
10833 INDEX is the value being switched on, with the lowest value
10834 in the table already subtracted.
10835 MODE is its expected mode (needed if INDEX is constant).
10836 RANGE is the length of the jump table.
10837 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10839 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10840 index value is out of range. */
10842 static void
10843 do_tablejump (index, mode, range, table_label, default_label)
10844 rtx index, range, table_label, default_label;
10845 enum machine_mode mode;
10846 {
10847 rtx temp, vector;
10849 /* Do an unsigned comparison (in the proper mode) between the index
10850 expression and the value which represents the length of the range.
10851 Since we just finished subtracting the lower bound of the range
10852 from the index expression, this comparison allows us to simultaneously
10853 check that the original index expression value is both greater than
10854 or equal to the minimum value of the range and less than or equal to
10855 the maximum value of the range. */
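/* For instance, if the switch covers case values 5 through 9, the index
   arrives here already rebased (index - 5) and RANGE is 4.  An in-range
   value such as 7 becomes 2, and 2 <= 4 unsigned, so we fall through;
   an out-of-range value such as 3 becomes -2, which is huge when viewed
   unsigned, so the GTU branch to DEFAULT_LABEL is taken.  One unsigned
   comparison thus checks both ends of the range.  */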
10857 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10858 0, default_label);
10860 /* If index is in range, it must fit in Pmode.
10861 Convert to Pmode so we can index with it. */
10862 if (mode != Pmode)
10863 index = convert_to_mode (Pmode, index, 1);
10865 /* Don't let a MEM slip thru, because then INDEX that comes
10866 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10867 and break_out_memory_refs will go to work on it and mess it up. */
10868 #ifdef PIC_CASE_VECTOR_ADDRESS
10869 if (flag_pic && GET_CODE (index) != REG)
10870 index = copy_to_mode_reg (Pmode, index);
10871 #endif
10873 /* If flag_force_addr were to affect this address
10874 it could interfere with the tricky assumptions made
10875 about addresses that contain label-refs,
10876 which may be valid only very near the tablejump itself. */
10877 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10878 GET_MODE_SIZE, because this indicates how large insns are. The other
10879 uses should all be Pmode, because they are addresses. This code
10880 could fail if addresses and insns are not the same size. */
10881 index = gen_rtx_PLUS (Pmode,
10882 gen_rtx_MULT (Pmode, index,
10883 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10884 gen_rtx_LABEL_REF (Pmode, table_label));
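/* The address built above is roughly
   (plus:P (mult:P index (const_int entry_size)) (label_ref table_label)),
   i.e. a scaled index off the start of the jump table, where entry_size
   is the size of one CASE_VECTOR_MODE table entry.  */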
10885 #ifdef PIC_CASE_VECTOR_ADDRESS
10886 if (flag_pic)
10887 index = PIC_CASE_VECTOR_ADDRESS (index);
10888 else
10889 #endif
10890 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10891 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10892 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10893 RTX_UNCHANGING_P (vector) = 1;
10894 convert_move (temp, vector, 0);
10896 emit_jump_insn (gen_tablejump (temp, table_label));
10898 /* If we are generating PIC code or if the table is PC-relative, the
10899 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10900 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10901 emit_barrier ();
10902 }
10904 int
10905 try_tablejump (index_type, index_expr, minval, range,
10906 table_label, default_label)
10907 tree index_type, index_expr, minval, range;
10908 rtx table_label, default_label;
10909 {
10910 rtx index;
10912 if (! HAVE_tablejump)
10913 return 0;
10915 index_expr = fold (build (MINUS_EXPR, index_type,
10916 convert (index_type, index_expr),
10917 convert (index_type, minval)));
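/* The fold matters: when the switch index is itself a constant, the
   MINUS_EXPR collapses to a constant, which is why do_tablejump is told
   the mode explicitly (a CONST_INT carries no mode of its own).  */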
10918 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10919 emit_queue ();
10920 index = protect_from_queue (index, 0);
10921 do_pending_stack_adjust ();
10923 do_tablejump (index, TYPE_MODE (index_type),
10924 convert_modes (TYPE_MODE (index_type),
10925 TYPE_MODE (TREE_TYPE (range)),
10926 expand_expr (range, NULL_RTX,
10927 VOIDmode, 0),
10928 TREE_UNSIGNED (TREE_TYPE (range))),
10929 table_label, default_label);
10930 return 1;
10931 }