1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
51 #ifdef PUSH_ROUNDING
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
57 #endif
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* Don't check memory usage, since code is being emitted to check memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
176 /* Record for each mode whether we can move a register directly to or
177 from an object of that mode in memory. If we can't, we won't try
178 to use that mode directly when accessing a field of that mode. */
180 static char direct_load[NUM_MACHINE_MODES];
181 static char direct_store[NUM_MACHINE_MODES];
183 /* If a memory-to-memory move would take MOVE_RATIO or more simple
184 move-instruction sequences, we will do a movstr or libcall instead. */
186 #ifndef MOVE_RATIO
187 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
188 #define MOVE_RATIO 2
189 #else
190 /* If we are optimizing for space (-Os), cut down the default move ratio. */
191 #define MOVE_RATIO (optimize_size ? 3 : 15)
192 #endif
193 #endif
195 /* This macro is used to determine whether move_by_pieces should be called
196 to perform a structure copy. */
197 #ifndef MOVE_BY_PIECES_P
198 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
199 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
200 #endif
202 /* This array records the insn_code of insns to perform block moves. */
203 enum insn_code movstr_optab[NUM_MACHINE_MODES];
205 /* This array records the insn_code of insns to perform block clears. */
206 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
208 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
210 #ifndef SLOW_UNALIGNED_ACCESS
211 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
212 #endif
214 /* This is run once per compilation to set up which modes can be used
215 directly in memory and to initialize the block move optab. */
217 void
218 init_expr_once ()
220 rtx insn, pat;
221 enum machine_mode mode;
222 int num_clobbers;
223 rtx mem, mem1;
225 start_sequence ();
227 /* Try indexing by frame ptr and try by stack ptr.
228 It is known that on the Convex the stack ptr isn't a valid index.
229 With luck, one or the other is valid on any machine. */
230 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
231 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
233 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
234 pat = PATTERN (insn);
236 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
237 mode = (enum machine_mode) ((int) mode + 1))
239 int regno;
240 rtx reg;
242 direct_load[(int) mode] = direct_store[(int) mode] = 0;
243 PUT_MODE (mem, mode);
244 PUT_MODE (mem1, mode);
246 /* See if there is some register that can be used in this mode and
247 directly loaded or stored from memory. */
249 if (mode != VOIDmode && mode != BLKmode)
250 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
251 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
252 regno++)
254 if (! HARD_REGNO_MODE_OK (regno, mode))
255 continue;
257 reg = gen_rtx_REG (mode, regno);
259 SET_SRC (pat) = mem;
260 SET_DEST (pat) = reg;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_load[(int) mode] = 1;
264 SET_SRC (pat) = mem1;
265 SET_DEST (pat) = reg;
266 if (recog (pat, insn, &num_clobbers) >= 0)
267 direct_load[(int) mode] = 1;
269 SET_SRC (pat) = reg;
270 SET_DEST (pat) = mem;
271 if (recog (pat, insn, &num_clobbers) >= 0)
272 direct_store[(int) mode] = 1;
274 SET_SRC (pat) = reg;
275 SET_DEST (pat) = mem1;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_store[(int) mode] = 1;
281 end_sequence ();
284 /* This is run at the start of compiling a function. */
286 void
287 init_expr ()
289 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
291 pending_chain = 0;
292 pending_stack_adjust = 0;
293 stack_pointer_delta = 0;
294 inhibit_defer_pop = 0;
295 saveregs_value = 0;
296 apply_args_value = 0;
297 forced_labels = 0;
300 void
301 mark_expr_status (p)
302 struct expr_status *p;
304 if (p == NULL)
305 return;
307 ggc_mark_rtx (p->x_saveregs_value);
308 ggc_mark_rtx (p->x_apply_args_value);
309 ggc_mark_rtx (p->x_forced_labels);
312 void
313 free_expr_status (f)
314 struct function *f;
316 free (f->expr);
317 f->expr = NULL;
320 /* Small sanity check that the queue is empty at the end of a function. */
322 void
323 finish_expr_for_function ()
325 if (pending_chain)
326 abort ();
329 /* Manage the queue of increment instructions to be output
330 for POSTINCREMENT_EXPR expressions, etc. */
332 /* Queue up to increment (or change) VAR later. BODY says how:
333 BODY should be the same thing you would pass to emit_insn
334 to increment right away. It will go to emit_insn later on.
336 The value is a QUEUED expression to be used in place of VAR
337 where you want to guarantee the pre-incrementation value of VAR. */
339 static rtx
340 enqueue_insn (var, body)
341 rtx var, body;
343 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
344 body, pending_chain);
345 return pending_chain;
348 /* Use protect_from_queue to convert a QUEUED expression
349 into something that you can put immediately into an instruction.
350 If the queued incrementation has not happened yet,
351 protect_from_queue returns the variable itself.
352 If the incrementation has happened, protect_from_queue returns a temp
353 that contains a copy of the old value of the variable.
355 Any time an rtx which might possibly be a QUEUED is to be put
356 into an instruction, it must be passed through protect_from_queue first.
357 QUEUED expressions are not meaningful in instructions.
359 Do not pass a value through protect_from_queue and then hold
360 on to it for a while before putting it in an instruction!
361 If the queue is flushed in between, incorrect code will result. */
364 protect_from_queue (x, modify)
365 register rtx x;
366 int modify;
368 register RTX_CODE code = GET_CODE (x);
370 #if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374 #endif
376 if (code != QUEUED)
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
386 register rtx y = XEXP (x, 0);
387 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
389 MEM_COPY_ATTRIBUTES (new, x);
391 if (QUEUED_INSN (y))
393 register rtx temp = gen_reg_rtx (GET_MODE (new));
394 emit_insn_before (gen_move_insn (temp, new),
395 QUEUED_INSN (y));
396 return temp;
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
400 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
401 return new;
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
414 else if (code == PLUS || code == MULT)
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
425 return x;
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
430 if (QUEUED_INSN (x) == 0)
431 return copy_to_reg (QUEUED_VAR (x));
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
444 /* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
450 queued_subexp_p (x)
451 rtx x;
453 register enum rtx_code code = GET_CODE (x);
454 switch (code)
456 case QUEUED:
457 return 1;
458 case MEM:
459 return queued_subexp_p (XEXP (x, 0));
460 case MULT:
461 case PLUS:
462 case MINUS:
463 return (queued_subexp_p (XEXP (x, 0))
464 || queued_subexp_p (XEXP (x, 1)));
465 default:
466 return 0;
470 /* Perform all the pending incrementations. */
472 void
473 emit_queue ()
475 register rtx p;
476 while ((p = pending_chain))
478 rtx body = QUEUED_BODY (p);
480 if (GET_CODE (body) == SEQUENCE)
482 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
483 emit_insn (QUEUED_BODY (p));
485 else
486 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
487 pending_chain = QUEUED_NEXT (p);
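/* A minimal usage sketch of the queue machinery above (hypothetical,
   for illustration only; example_post_increment is not a real function
   in this file).  REG is a pseudo holding a variable that is being
   post-incremented: the increment is queued, the pre-increment value is
   obtained with protect_from_queue, and the queue is flushed with
   emit_queue once the insns that use the value have been emitted.  */
#if 0
static rtx
example_post_increment (reg)
     rtx reg;
{
  /* Queue "reg = reg + 1"; the QUEUED rtx stands for REG until the
     queue is flushed.  */
  rtx queued = enqueue_insn (reg, gen_rtx_SET (VOIDmode, reg,
					       plus_constant (reg, 1)));

  /* Convert the QUEUED rtx into something that can go into an insn;
     since the increment has not happened yet, this copies the current
     (pre-increment) value into a new pseudo.  */
  rtx value = protect_from_queue (queued, 0);

  /* ... emit insns that use VALUE here ...  */

  /* Perform the pending incrementation.  */
  emit_queue ();
  return value;
}
#endif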
491 /* Copy data from FROM to TO, where the machine modes are not the same.
492 Both modes may be integer, or both may be floating.
493 UNSIGNEDP should be nonzero if FROM is an unsigned type.
494 This causes zero-extension instead of sign-extension. */
496 void
497 convert_move (to, from, unsignedp)
498 register rtx to, from;
499 int unsignedp;
501 enum machine_mode to_mode = GET_MODE (to);
502 enum machine_mode from_mode = GET_MODE (from);
503 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
504 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
505 enum insn_code code;
506 rtx libcall;
508 /* rtx code for making an equivalent value. */
509 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
511 to = protect_from_queue (to, 1);
512 from = protect_from_queue (from, 0);
514 if (to_real != from_real)
515 abort ();
517 /* If FROM is a SUBREG that indicates that we have already done at least
518 the required extension, strip it. We don't handle such SUBREGs as
519 TO here. */
521 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
522 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
523 >= GET_MODE_SIZE (to_mode))
524 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
525 from = gen_lowpart (to_mode, from), from_mode = to_mode;
527 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
528 abort ();
530 if (to_mode == from_mode
531 || (from_mode == VOIDmode && CONSTANT_P (from)))
533 emit_move_insn (to, from);
534 return;
537 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
539 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
540 abort ();
542 if (VECTOR_MODE_P (to_mode))
543 from = gen_rtx_SUBREG (to_mode, from, 0);
544 else
545 to = gen_rtx_SUBREG (from_mode, to, 0);
547 emit_move_insn (to, from);
548 return;
551 if (to_real != from_real)
552 abort ();
554 if (to_real)
556 rtx value, insns;
558 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
560 /* Try converting directly if the insn is supported. */
561 if ((code = can_extend_p (to_mode, from_mode, 0))
562 != CODE_FOR_nothing)
564 emit_unop_insn (code, to, from, UNKNOWN);
565 return;
569 #ifdef HAVE_trunchfqf2
570 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
572 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
573 return;
575 #endif
576 #ifdef HAVE_trunctqfqf2
577 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
579 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
580 return;
582 #endif
583 #ifdef HAVE_truncsfqf2
584 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
586 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
587 return;
589 #endif
590 #ifdef HAVE_truncdfqf2
591 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
593 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
594 return;
596 #endif
597 #ifdef HAVE_truncxfqf2
598 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
600 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
601 return;
603 #endif
604 #ifdef HAVE_trunctfqf2
605 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
607 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
608 return;
610 #endif
612 #ifdef HAVE_trunctqfhf2
613 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
615 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
616 return;
618 #endif
619 #ifdef HAVE_truncsfhf2
620 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
622 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
623 return;
625 #endif
626 #ifdef HAVE_truncdfhf2
627 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
629 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
630 return;
632 #endif
633 #ifdef HAVE_truncxfhf2
634 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
637 return;
639 #endif
640 #ifdef HAVE_trunctfhf2
641 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
644 return;
646 #endif
648 #ifdef HAVE_truncsftqf2
649 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
651 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncdftqf2
656 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
658 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_truncxftqf2
663 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
665 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
666 return;
668 #endif
669 #ifdef HAVE_trunctftqf2
670 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
672 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
673 return;
675 #endif
677 #ifdef HAVE_truncdfsf2
678 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
680 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncxfsf2
685 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
687 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_trunctfsf2
692 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
694 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_truncxfdf2
699 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
701 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
702 return;
704 #endif
705 #ifdef HAVE_trunctfdf2
706 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
708 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
709 return;
711 #endif
713 libcall = (rtx) 0;
714 switch (from_mode)
716 case SFmode:
717 switch (to_mode)
719 case DFmode:
720 libcall = extendsfdf2_libfunc;
721 break;
723 case XFmode:
724 libcall = extendsfxf2_libfunc;
725 break;
727 case TFmode:
728 libcall = extendsftf2_libfunc;
729 break;
731 default:
732 break;
734 break;
736 case DFmode:
737 switch (to_mode)
739 case SFmode:
740 libcall = truncdfsf2_libfunc;
741 break;
743 case XFmode:
744 libcall = extenddfxf2_libfunc;
745 break;
747 case TFmode:
748 libcall = extenddftf2_libfunc;
749 break;
751 default:
752 break;
754 break;
756 case XFmode:
757 switch (to_mode)
759 case SFmode:
760 libcall = truncxfsf2_libfunc;
761 break;
763 case DFmode:
764 libcall = truncxfdf2_libfunc;
765 break;
767 default:
768 break;
770 break;
772 case TFmode:
773 switch (to_mode)
775 case SFmode:
776 libcall = trunctfsf2_libfunc;
777 break;
779 case DFmode:
780 libcall = trunctfdf2_libfunc;
781 break;
783 default:
784 break;
786 break;
788 default:
789 break;
792 if (libcall == (rtx) 0)
793 /* This conversion is not implemented yet. */
794 abort ();
796 start_sequence ();
797 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
798 1, from, from_mode);
799 insns = get_insns ();
800 end_sequence ();
801 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
802 from));
803 return;
806 /* Now both modes are integers. */
808 /* Handle expanding beyond a word. */
809 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
810 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
812 rtx insns;
813 rtx lowpart;
814 rtx fill_value;
815 rtx lowfrom;
816 int i;
817 enum machine_mode lowpart_mode;
818 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
820 /* Try converting directly if the insn is supported. */
821 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
822 != CODE_FOR_nothing)
824 /* If FROM is a SUBREG, put it into a register. Do this
825 so that we always generate the same set of insns for
826 better cse'ing; if an intermediate assignment occurred,
827 we won't be doing the operation directly on the SUBREG. */
828 if (optimize > 0 && GET_CODE (from) == SUBREG)
829 from = force_reg (from_mode, from);
830 emit_unop_insn (code, to, from, equiv_code);
831 return;
833 /* Next, try converting via full word. */
834 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
835 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
836 != CODE_FOR_nothing))
838 if (GET_CODE (to) == REG)
839 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
840 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
841 emit_unop_insn (code, to,
842 gen_lowpart (word_mode, to), equiv_code);
843 return;
846 /* No special multiword conversion insn; do it by hand. */
847 start_sequence ();
849 /* Since we will turn this into a no conflict block, we must ensure
850 that the source does not overlap the target. */
852 if (reg_overlap_mentioned_p (to, from))
853 from = force_reg (from_mode, from);
855 /* Get a copy of FROM widened to a word, if necessary. */
856 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
857 lowpart_mode = word_mode;
858 else
859 lowpart_mode = from_mode;
861 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
863 lowpart = gen_lowpart (lowpart_mode, to);
864 emit_move_insn (lowpart, lowfrom);
866 /* Compute the value to put in each remaining word. */
867 if (unsignedp)
868 fill_value = const0_rtx;
869 else
871 #ifdef HAVE_slt
872 if (HAVE_slt
873 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
874 && STORE_FLAG_VALUE == -1)
876 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
877 lowpart_mode, 0, 0);
878 fill_value = gen_reg_rtx (word_mode);
879 emit_insn (gen_slt (fill_value));
881 else
882 #endif
884 fill_value
885 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
886 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
887 NULL_RTX, 0);
888 fill_value = convert_to_mode (word_mode, fill_value, 1);
892 /* Fill the remaining words. */
893 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
895 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
896 rtx subword = operand_subword (to, index, 1, to_mode);
898 if (subword == 0)
899 abort ();
901 if (fill_value != subword)
902 emit_move_insn (subword, fill_value);
905 insns = get_insns ();
906 end_sequence ();
908 emit_no_conflict_block (insns, to, from, NULL_RTX,
909 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
910 return;
913 /* Truncating multi-word to a word or less. */
914 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
915 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
917 if (!((GET_CODE (from) == MEM
918 && ! MEM_VOLATILE_P (from)
919 && direct_load[(int) to_mode]
920 && ! mode_dependent_address_p (XEXP (from, 0)))
921 || GET_CODE (from) == REG
922 || GET_CODE (from) == SUBREG))
923 from = force_reg (from_mode, from);
924 convert_move (to, gen_lowpart (word_mode, from), 0);
925 return;
928 /* Handle pointer conversion. */ /* SPEE 900220. */
929 if (to_mode == PQImode)
931 if (from_mode != QImode)
932 from = convert_to_mode (QImode, from, unsignedp);
934 #ifdef HAVE_truncqipqi2
935 if (HAVE_truncqipqi2)
937 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
938 return;
940 #endif /* HAVE_truncqipqi2 */
941 abort ();
944 if (from_mode == PQImode)
946 if (to_mode != QImode)
948 from = convert_to_mode (QImode, from, unsignedp);
949 from_mode = QImode;
951 else
953 #ifdef HAVE_extendpqiqi2
954 if (HAVE_extendpqiqi2)
956 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
957 return;
959 #endif /* HAVE_extendpqiqi2 */
960 abort ();
964 if (to_mode == PSImode)
966 if (from_mode != SImode)
967 from = convert_to_mode (SImode, from, unsignedp);
969 #ifdef HAVE_truncsipsi2
970 if (HAVE_truncsipsi2)
972 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
973 return;
975 #endif /* HAVE_truncsipsi2 */
976 abort ();
979 if (from_mode == PSImode)
981 if (to_mode != SImode)
983 from = convert_to_mode (SImode, from, unsignedp);
984 from_mode = SImode;
986 else
988 #ifdef HAVE_extendpsisi2
989 if (! unsignedp && HAVE_extendpsisi2)
991 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
992 return;
994 #endif /* HAVE_extendpsisi2 */
995 #ifdef HAVE_zero_extendpsisi2
996 if (unsignedp && HAVE_zero_extendpsisi2)
998 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
999 return;
1001 #endif /* HAVE_zero_extendpsisi2 */
1002 abort ();
1006 if (to_mode == PDImode)
1008 if (from_mode != DImode)
1009 from = convert_to_mode (DImode, from, unsignedp);
1011 #ifdef HAVE_truncdipdi2
1012 if (HAVE_truncdipdi2)
1014 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_truncdipdi2 */
1018 abort ();
1021 if (from_mode == PDImode)
1023 if (to_mode != DImode)
1025 from = convert_to_mode (DImode, from, unsignedp);
1026 from_mode = DImode;
1028 else
1030 #ifdef HAVE_extendpdidi2
1031 if (HAVE_extendpdidi2)
1033 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1034 return;
1036 #endif /* HAVE_extendpdidi2 */
1037 abort ();
1041 /* Now follow all the conversions between integers
1042 no more than a word long. */
1044 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1045 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1046 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1047 GET_MODE_BITSIZE (from_mode)))
1049 if (!((GET_CODE (from) == MEM
1050 && ! MEM_VOLATILE_P (from)
1051 && direct_load[(int) to_mode]
1052 && ! mode_dependent_address_p (XEXP (from, 0)))
1053 || GET_CODE (from) == REG
1054 || GET_CODE (from) == SUBREG))
1055 from = force_reg (from_mode, from);
1056 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1057 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1058 from = copy_to_reg (from);
1059 emit_move_insn (to, gen_lowpart (to_mode, from));
1060 return;
1063 /* Handle extension. */
1064 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1066 /* Convert directly if that works. */
1067 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1068 != CODE_FOR_nothing)
1070 emit_unop_insn (code, to, from, equiv_code);
1071 return;
1073 else
1075 enum machine_mode intermediate;
1076 rtx tmp;
1077 tree shift_amount;
1079 /* Search for a mode to convert via. */
1080 for (intermediate = from_mode; intermediate != VOIDmode;
1081 intermediate = GET_MODE_WIDER_MODE (intermediate))
1082 if (((can_extend_p (to_mode, intermediate, unsignedp)
1083 != CODE_FOR_nothing)
1084 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1085 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1086 GET_MODE_BITSIZE (intermediate))))
1087 && (can_extend_p (intermediate, from_mode, unsignedp)
1088 != CODE_FOR_nothing))
1090 convert_move (to, convert_to_mode (intermediate, from,
1091 unsignedp), unsignedp);
1092 return;
1095 /* No suitable intermediate mode.
1096 Generate what we need with shifts. */
1097 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1098 - GET_MODE_BITSIZE (from_mode), 0);
1099 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1100 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1101 to, unsignedp);
1102 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1103 to, unsignedp);
1104 if (tmp != to)
1105 emit_move_insn (to, tmp);
1106 return;
1110 /* Support special truncate insns for certain modes. */
1112 if (from_mode == DImode && to_mode == SImode)
1114 #ifdef HAVE_truncdisi2
1115 if (HAVE_truncdisi2)
1117 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1118 return;
1120 #endif
1121 convert_move (to, force_reg (from_mode, from), unsignedp);
1122 return;
1125 if (from_mode == DImode && to_mode == HImode)
1127 #ifdef HAVE_truncdihi2
1128 if (HAVE_truncdihi2)
1130 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1131 return;
1133 #endif
1134 convert_move (to, force_reg (from_mode, from), unsignedp);
1135 return;
1138 if (from_mode == DImode && to_mode == QImode)
1140 #ifdef HAVE_truncdiqi2
1141 if (HAVE_truncdiqi2)
1143 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1144 return;
1146 #endif
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 return;
1151 if (from_mode == SImode && to_mode == HImode)
1153 #ifdef HAVE_truncsihi2
1154 if (HAVE_truncsihi2)
1156 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1157 return;
1159 #endif
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 return;
1164 if (from_mode == SImode && to_mode == QImode)
1166 #ifdef HAVE_truncsiqi2
1167 if (HAVE_truncsiqi2)
1169 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1170 return;
1172 #endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1177 if (from_mode == HImode && to_mode == QImode)
1179 #ifdef HAVE_trunchiqi2
1180 if (HAVE_trunchiqi2)
1182 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1183 return;
1185 #endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1190 if (from_mode == TImode && to_mode == DImode)
1192 #ifdef HAVE_trunctidi2
1193 if (HAVE_trunctidi2)
1195 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1196 return;
1198 #endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1203 if (from_mode == TImode && to_mode == SImode)
1205 #ifdef HAVE_trunctisi2
1206 if (HAVE_trunctisi2)
1208 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1209 return;
1211 #endif
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 return;
1216 if (from_mode == TImode && to_mode == HImode)
1218 #ifdef HAVE_trunctihi2
1219 if (HAVE_trunctihi2)
1221 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1222 return;
1224 #endif
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 return;
1229 if (from_mode == TImode && to_mode == QImode)
1231 #ifdef HAVE_trunctiqi2
1232 if (HAVE_trunctiqi2)
1234 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1235 return;
1237 #endif
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 return;
1242 /* Handle truncation of volatile memrefs, and so on;
1243 the things that couldn't be truncated directly,
1244 and for which there was no special instruction. */
1245 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1247 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1248 emit_move_insn (to, temp);
1249 return;
1252 /* Mode combination is not recognized. */
1253 abort ();
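/* A minimal usage sketch for convert_move (hypothetical, for
   illustration only).  FROM holds a QImode value; the call widens it
   into a fresh SImode pseudo, sign-extending because UNSIGNEDP is 0.  */
#if 0
static rtx
example_widen_qi_to_si (from)
     rtx from;
{
  rtx to = gen_reg_rtx (SImode);

  convert_move (to, from, 0);
  return to;
}
#endif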
1256 /* Return an rtx for a value that would result
1257 from converting X to mode MODE.
1258 Both X and MODE may be floating, or both integer.
1259 UNSIGNEDP is nonzero if X is an unsigned value.
1260 This can be done by referring to a part of X in place
1261 or by copying to a new temporary with conversion.
1263 This function *must not* call protect_from_queue
1264 except when putting X into an insn (in which case convert_move does it). */
1267 convert_to_mode (mode, x, unsignedp)
1268 enum machine_mode mode;
1269 rtx x;
1270 int unsignedp;
1272 return convert_modes (mode, VOIDmode, x, unsignedp);
1275 /* Return an rtx for a value that would result
1276 from converting X from mode OLDMODE to mode MODE.
1277 Both modes may be floating, or both integer.
1278 UNSIGNEDP is nonzero if X is an unsigned value.
1280 This can be done by referring to a part of X in place
1281 or by copying to a new temporary with conversion.
1283 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1285 This function *must not* call protect_from_queue
1286 except when putting X into an insn (in which case convert_move does it). */
1289 convert_modes (mode, oldmode, x, unsignedp)
1290 enum machine_mode mode, oldmode;
1291 rtx x;
1292 int unsignedp;
1294 register rtx temp;
1296 /* If FROM is a SUBREG that indicates that we have already done at least
1297 the required extension, strip it. */
1299 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1300 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1301 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1302 x = gen_lowpart (mode, x);
1304 if (GET_MODE (x) != VOIDmode)
1305 oldmode = GET_MODE (x);
1307 if (mode == oldmode)
1308 return x;
1310 /* There is one case that we must handle specially: If we are converting
1311 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1312 we are to interpret the constant as unsigned, gen_lowpart will do
1313 the wrong thing if the constant appears negative. What we want to do is
1314 make the high-order word of the constant zero, not all ones. */
1316 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1317 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1318 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1320 HOST_WIDE_INT val = INTVAL (x);
1322 if (oldmode != VOIDmode
1323 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1325 int width = GET_MODE_BITSIZE (oldmode);
1327 /* We need to zero extend VAL. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1331 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1334 /* We can do this with a gen_lowpart if both desired and current modes
1335 are integer, and this is either a constant integer, a register, or a
1336 non-volatile MEM. Except for the constant case where MODE is no
1337 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1339 if ((GET_CODE (x) == CONST_INT
1340 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1341 || (GET_MODE_CLASS (mode) == MODE_INT
1342 && GET_MODE_CLASS (oldmode) == MODE_INT
1343 && (GET_CODE (x) == CONST_DOUBLE
1344 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1345 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1346 && direct_load[(int) mode])
1347 || (GET_CODE (x) == REG
1348 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1349 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1351 /* ?? If we don't know OLDMODE, we have to assume here that
1352 X does not need sign- or zero-extension. This may not be
1353 the case, but it's the best we can do. */
1354 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1355 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1357 HOST_WIDE_INT val = INTVAL (x);
1358 int width = GET_MODE_BITSIZE (oldmode);
1360 /* We must sign or zero-extend in this case. Start by
1361 zero-extending, then sign extend if we need to. */
1362 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1363 if (! unsignedp
1364 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1365 val |= (HOST_WIDE_INT) (-1) << width;
1367 return GEN_INT (val);
1370 return gen_lowpart (mode, x);
1373 temp = gen_reg_rtx (mode);
1374 convert_move (temp, x, unsignedp);
1375 return temp;
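/* A minimal usage sketch for convert_modes (hypothetical, for
   illustration only).  Unlike convert_to_mode, the caller may supply
   OLDMODE for a VOIDmode constant; here (const_int -1) is treated as an
   unsigned QImode value, so the result is (const_int 255) rather than a
   sign-extended -1.  */
#if 0
static rtx
example_zero_extend_qi_constant ()
{
  rtx x = GEN_INT (-1);

  return convert_modes (SImode, QImode, x, 1);
}
#endif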
1378 /* This macro is used to determine the largest unit size that
1379 move_by_pieces can use. */
1381 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1382 move efficiently, as opposed to MOVE_MAX which is the maximum
1383 number of bytes we can move with a single instruction. */
1385 #ifndef MOVE_MAX_PIECES
1386 #define MOVE_MAX_PIECES MOVE_MAX
1387 #endif
1389 /* Generate several move instructions to copy LEN bytes
1390 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1391 The caller must pass FROM and TO
1392 through protect_from_queue before calling.
1393 ALIGN is maximum alignment we can assume. */
1395 void
1396 move_by_pieces (to, from, len, align)
1397 rtx to, from;
1398 unsigned HOST_WIDE_INT len;
1399 unsigned int align;
1401 struct move_by_pieces data;
1402 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1403 unsigned int max_size = MOVE_MAX_PIECES + 1;
1404 enum machine_mode mode = VOIDmode, tmode;
1405 enum insn_code icode;
1407 data.offset = 0;
1408 data.to_addr = to_addr;
1409 data.from_addr = from_addr;
1410 data.to = to;
1411 data.from = from;
1412 data.autinc_to
1413 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1414 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1415 data.autinc_from
1416 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1417 || GET_CODE (from_addr) == POST_INC
1418 || GET_CODE (from_addr) == POST_DEC);
1420 data.explicit_inc_from = 0;
1421 data.explicit_inc_to = 0;
1422 data.reverse
1423 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 if (data.reverse) data.offset = len;
1425 data.len = len;
1427 /* If copying requires more than two move insns,
1428 copy addresses to registers (to make displacements shorter)
1429 and use post-increment if available. */
1430 if (!(data.autinc_from && data.autinc_to)
1431 && move_by_pieces_ninsns (len, align) > 2)
1433 /* Find the mode of the largest move... */
1434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1435 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1436 if (GET_MODE_SIZE (tmode) < max_size)
1437 mode = tmode;
1439 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1441 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1442 data.autinc_from = 1;
1443 data.explicit_inc_from = -1;
1445 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (from_addr);
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = 1;
1451 if (!data.autinc_from && CONSTANT_P (from_addr))
1452 data.from_addr = copy_addr_to_reg (from_addr);
1453 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1455 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1456 data.autinc_to = 1;
1457 data.explicit_inc_to = -1;
1459 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (to_addr);
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = 1;
1465 if (!data.autinc_to && CONSTANT_P (to_addr))
1466 data.to_addr = copy_addr_to_reg (to_addr);
1469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1471 align = MOVE_MAX * BITS_PER_UNIT;
1473 /* First move what we can in the largest integer mode, then go to
1474 successively smaller modes. */
1476 while (max_size > 1)
1478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1480 if (GET_MODE_SIZE (tmode) < max_size)
1481 mode = tmode;
1483 if (mode == VOIDmode)
1484 break;
1486 icode = mov_optab->handlers[(int) mode].insn_code;
1487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1488 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1490 max_size = GET_MODE_SIZE (mode);
1493 /* The code above should have handled everything. */
1494 if (data.len > 0)
1495 abort ();
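/* A minimal sketch of how a caller decides to use move_by_pieces
   (hypothetical, for illustration only; this mirrors the test made in
   emit_block_move below).  X and Y are BLKmode MEMs that have already
   been passed through protect_from_queue, SIZE is the byte count, and
   ALIGN is the known alignment in bits.  */
#if 0
static void
example_copy_block (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    emit_block_move (x, y, size, align);
}
#endif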
1498 /* Return number of insns required to move L bytes by pieces.
1499 ALIGN (in bits) is maximum alignment we can assume. */
1501 static unsigned HOST_WIDE_INT
1502 move_by_pieces_ninsns (l, align)
1503 unsigned HOST_WIDE_INT l;
1504 unsigned int align;
1506 unsigned HOST_WIDE_INT n_insns = 0;
1507 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1509 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1510 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1511 align = MOVE_MAX * BITS_PER_UNIT;
1513 while (max_size > 1)
1515 enum machine_mode mode = VOIDmode, tmode;
1516 enum insn_code icode;
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1521 mode = tmode;
1523 if (mode == VOIDmode)
1524 break;
1526 icode = mov_optab->handlers[(int) mode].insn_code;
1527 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1528 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1530 max_size = GET_MODE_SIZE (mode);
1533 if (l)
1534 abort ();
1535 return n_insns;
1538 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1539 with move instructions for mode MODE. GENFUN is the gen_... function
1540 to make a move insn for that mode. DATA has all the other info. */
1542 static void
1543 move_by_pieces_1 (genfun, mode, data)
1544 rtx (*genfun) PARAMS ((rtx, ...));
1545 enum machine_mode mode;
1546 struct move_by_pieces *data;
1548 unsigned int size = GET_MODE_SIZE (mode);
1549 rtx to1, from1;
1551 while (data->len >= size)
1553 if (data->reverse)
1554 data->offset -= size;
1556 if (data->autinc_to)
1558 to1 = gen_rtx_MEM (mode, data->to_addr);
1559 MEM_COPY_ATTRIBUTES (to1, data->to);
1561 else
1562 to1 = change_address (data->to, mode,
1563 plus_constant (data->to_addr, data->offset));
1565 if (data->autinc_from)
1567 from1 = gen_rtx_MEM (mode, data->from_addr);
1568 MEM_COPY_ATTRIBUTES (from1, data->from);
1570 else
1571 from1 = change_address (data->from, mode,
1572 plus_constant (data->from_addr, data->offset));
1574 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1575 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1576 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1577 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1579 emit_insn ((*genfun) (to1, from1));
1581 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1582 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1583 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1584 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1586 if (! data->reverse)
1587 data->offset += size;
1589 data->len -= size;
1593 /* Emit code to move a block Y to a block X.
1594 This may be done with string-move instructions,
1595 with multiple scalar move instructions, or with a library call.
1597 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1598 with mode BLKmode.
1599 SIZE is an rtx that says how long they are.
1600 ALIGN is the maximum alignment we can assume they have.
1602 Return the address of the new block, if memcpy is called and returns it,
1603 0 otherwise. */
1606 emit_block_move (x, y, size, align)
1607 rtx x, y;
1608 rtx size;
1609 unsigned int align;
1611 rtx retval = 0;
1612 #ifdef TARGET_MEM_FUNCTIONS
1613 static tree fn;
1614 tree call_expr, arg_list;
1615 #endif
1617 if (GET_MODE (x) != BLKmode)
1618 abort ();
1620 if (GET_MODE (y) != BLKmode)
1621 abort ();
1623 x = protect_from_queue (x, 1);
1624 y = protect_from_queue (y, 0);
1625 size = protect_from_queue (size, 0);
1627 if (GET_CODE (x) != MEM)
1628 abort ();
1629 if (GET_CODE (y) != MEM)
1630 abort ();
1631 if (size == 0)
1632 abort ();
1634 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1635 move_by_pieces (x, y, INTVAL (size), align);
1636 else
1638 /* Try the most limited insn first, because there's no point
1639 including more than one in the machine description unless
1640 the more limited one has some advantage. */
1642 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1643 enum machine_mode mode;
1645 /* Since this is a move insn, we don't care about volatility. */
1646 volatile_ok = 1;
1648 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1649 mode = GET_MODE_WIDER_MODE (mode))
1651 enum insn_code code = movstr_optab[(int) mode];
1652 insn_operand_predicate_fn pred;
1654 if (code != CODE_FOR_nothing
1655 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1656 here because if SIZE is less than the mode mask, as it is
1657 returned by the macro, it will definitely be less than the
1658 actual mode mask. */
1659 && ((GET_CODE (size) == CONST_INT
1660 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1661 <= (GET_MODE_MASK (mode) >> 1)))
1662 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1663 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1664 || (*pred) (x, BLKmode))
1665 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1666 || (*pred) (y, BLKmode))
1667 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1668 || (*pred) (opalign, VOIDmode)))
1670 rtx op2;
1671 rtx last = get_last_insn ();
1672 rtx pat;
1674 op2 = convert_to_mode (mode, size, 1);
1675 pred = insn_data[(int) code].operand[2].predicate;
1676 if (pred != 0 && ! (*pred) (op2, mode))
1677 op2 = copy_to_mode_reg (mode, op2);
1679 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1680 if (pat)
1682 emit_insn (pat);
1683 volatile_ok = 0;
1684 return 0;
1686 else
1687 delete_insns_since (last);
1691 volatile_ok = 0;
1693 /* X, Y, or SIZE may have been passed through protect_from_queue.
1695 It is unsafe to save the value generated by protect_from_queue
1696 and reuse it later. Consider what happens if emit_queue is
1697 called before the return value from protect_from_queue is used.
1699 Expansion of the CALL_EXPR below will call emit_queue before
1700 we are finished emitting RTL for argument setup. So if we are
1701 not careful we could get the wrong value for an argument.
1703 To avoid this problem we go ahead and emit code to copy X, Y &
1704 SIZE into new pseudos. We can then place those new pseudos
1705 into an RTL_EXPR and use them later, even after a call to
1706 emit_queue.
1708 Note this is not strictly needed for library calls since they
1709 do not call emit_queue before loading their arguments. However,
1710 we may need to have library calls call emit_queue in the future
1711 since failing to do so could cause problems for targets which
1712 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1713 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1714 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1716 #ifdef TARGET_MEM_FUNCTIONS
1717 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1718 #else
1719 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1720 TREE_UNSIGNED (integer_type_node));
1721 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1722 #endif
1724 #ifdef TARGET_MEM_FUNCTIONS
1725 /* It is incorrect to use the libcall calling conventions to call
1726 memcpy in this context.
1728 This could be a user call to memcpy and the user may wish to
1729 examine the return value from memcpy.
1731 For targets where libcalls and normal calls have different conventions
1732 for returning pointers, we could end up generating incorrect code.
1734 So instead of using a libcall sequence we build up a suitable
1735 CALL_EXPR and expand the call in the normal fashion. */
1736 if (fn == NULL_TREE)
1738 tree fntype;
1740 /* This was copied from except.c; I don't know if all this is
1741 necessary in this context or not. */
1742 fn = get_identifier ("memcpy");
1743 fntype = build_pointer_type (void_type_node);
1744 fntype = build_function_type (fntype, NULL_TREE);
1745 fn = build_decl (FUNCTION_DECL, fn, fntype);
1746 ggc_add_tree_root (&fn, 1);
1747 DECL_EXTERNAL (fn) = 1;
1748 TREE_PUBLIC (fn) = 1;
1749 DECL_ARTIFICIAL (fn) = 1;
1750 make_decl_rtl (fn, NULL_PTR);
1751 assemble_external (fn);
1754 /* We need to make an argument list for the function call.
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node), x));
1761 TREE_CHAIN (arg_list)
1762 = build_tree_list (NULL_TREE,
1763 make_tree (build_pointer_type (void_type_node), y));
1764 TREE_CHAIN (TREE_CHAIN (arg_list))
1765 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1766 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1768 /* Now we have to build up the CALL_EXPR itself. */
1769 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1770 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1771 call_expr, arg_list, NULL_TREE);
1772 TREE_SIDE_EFFECTS (call_expr) = 1;
1774 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1775 #else
1776 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1777 VOIDmode, 3, y, Pmode, x, Pmode,
1778 convert_to_mode (TYPE_MODE (integer_type_node), size,
1779 TREE_UNSIGNED (integer_type_node)),
1780 TYPE_MODE (integer_type_node));
1781 #endif
1784 return retval;
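/* A minimal usage sketch for emit_block_move (hypothetical, for
   illustration only).  DST_ADDR and SRC_ADDR are pseudos holding the
   two addresses; 32 bytes are copied and the last argument gives the
   known alignment in bits.  */
#if 0
static void
example_copy_32_bytes (dst_addr, src_addr)
     rtx dst_addr, src_addr;
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  emit_block_move (dst, src, GEN_INT (32), 32);
}
#endif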
1787 /* Copy all or part of a value X into registers starting at REGNO.
1788 The number of registers to be filled is NREGS. */
1790 void
1791 move_block_to_reg (regno, x, nregs, mode)
1792 int regno;
1793 rtx x;
1794 int nregs;
1795 enum machine_mode mode;
1797 int i;
1798 #ifdef HAVE_load_multiple
1799 rtx pat;
1800 rtx last;
1801 #endif
1803 if (nregs == 0)
1804 return;
1806 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1807 x = validize_mem (force_const_mem (mode, x));
1809 /* See if the machine can do this with a load multiple insn. */
1810 #ifdef HAVE_load_multiple
1811 if (HAVE_load_multiple)
1813 last = get_last_insn ();
1814 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1815 GEN_INT (nregs));
1816 if (pat)
1818 emit_insn (pat);
1819 return;
1821 else
1822 delete_insns_since (last);
1824 #endif
1826 for (i = 0; i < nregs; i++)
1827 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1828 operand_subword_force (x, i, mode));
1831 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1832 The number of registers to be filled is NREGS. SIZE indicates the number
1833 of bytes in the object X. */
1835 void
1836 move_block_from_reg (regno, x, nregs, size)
1837 int regno;
1838 rtx x;
1839 int nregs;
1840 int size;
1842 int i;
1843 #ifdef HAVE_store_multiple
1844 rtx pat;
1845 rtx last;
1846 #endif
1847 enum machine_mode mode;
1849 if (nregs == 0)
1850 return;
1852 /* If SIZE is that of a mode no bigger than a word, just use that
1853 mode's store operation. */
1854 if (size <= UNITS_PER_WORD
1855 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1857 emit_move_insn (change_address (x, mode, NULL),
1858 gen_rtx_REG (mode, regno));
1859 return;
1862 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1863 to the left before storing to memory. Note that the previous test
1864 doesn't handle all cases (e.g. SIZE == 3). */
1865 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1867 rtx tem = operand_subword (x, 0, 1, BLKmode);
1868 rtx shift;
1870 if (tem == 0)
1871 abort ();
1873 shift = expand_shift (LSHIFT_EXPR, word_mode,
1874 gen_rtx_REG (word_mode, regno),
1875 build_int_2 ((UNITS_PER_WORD - size)
1876 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1877 emit_move_insn (tem, shift);
1878 return;
1881 /* See if the machine can do this with a store multiple insn. */
1882 #ifdef HAVE_store_multiple
1883 if (HAVE_store_multiple)
1885 last = get_last_insn ();
1886 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1887 GEN_INT (nregs));
1888 if (pat)
1890 emit_insn (pat);
1891 return;
1893 else
1894 delete_insns_since (last);
1896 #endif
1898 for (i = 0; i < nregs; i++)
1900 rtx tem = operand_subword (x, i, 1, BLKmode);
1902 if (tem == 0)
1903 abort ();
1905 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1909 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1910 registers represented by a PARALLEL. SSIZE represents the total size of
1911 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1912 SRC in bits. */
1913 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1914 the balance will be in what would be the low-order memory addresses, i.e.
1915 left justified for big endian, right justified for little endian. This
1916 happens to be true for the targets currently using this support. If this
1917 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1918 would be needed. */
1920 void
1921 emit_group_load (dst, orig_src, ssize, align)
1922 rtx dst, orig_src;
1923 unsigned int align;
1924 int ssize;
1926 rtx *tmps, src;
1927 int start, i;
1929 if (GET_CODE (dst) != PARALLEL)
1930 abort ();
1932 /* Check for a NULL entry, used to indicate that the parameter goes
1933 both on the stack and in registers. */
1934 if (XEXP (XVECEXP (dst, 0, 0), 0))
1935 start = 0;
1936 else
1937 start = 1;
1939 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1941 /* Process the pieces. */
1942 for (i = start; i < XVECLEN (dst, 0); i++)
1944 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1945 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1946 unsigned int bytelen = GET_MODE_SIZE (mode);
1947 int shift = 0;
1949 /* Handle trailing fragments that run over the size of the struct. */
1950 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1952 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1953 bytelen = ssize - bytepos;
1954 if (bytelen <= 0)
1955 abort ();
1958 /* If we won't be loading directly from memory, protect the real source
1959 from strange tricks we might play; but make sure that the source can
1960 be loaded directly into the destination. */
1961 src = orig_src;
1962 if (GET_CODE (orig_src) != MEM
1963 && (!CONSTANT_P (orig_src)
1964 || (GET_MODE (orig_src) != mode
1965 && GET_MODE (orig_src) != VOIDmode)))
1967 if (GET_MODE (orig_src) == VOIDmode)
1968 src = gen_reg_rtx (mode);
1969 else
1970 src = gen_reg_rtx (GET_MODE (orig_src));
1971 emit_move_insn (src, orig_src);
1974 /* Optimize the access just a bit. */
1975 if (GET_CODE (src) == MEM
1976 && align >= GET_MODE_ALIGNMENT (mode)
1977 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1978 && bytelen == GET_MODE_SIZE (mode))
1980 tmps[i] = gen_reg_rtx (mode);
1981 emit_move_insn (tmps[i],
1982 change_address (src, mode,
1983 plus_constant (XEXP (src, 0),
1984 bytepos)));
1986 else if (GET_CODE (src) == CONCAT)
1988 if (bytepos == 0
1989 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1990 tmps[i] = XEXP (src, 0);
1991 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1992 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1993 tmps[i] = XEXP (src, 1);
1994 else
1995 abort ();
1997 else if (CONSTANT_P (src)
1998 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1999 tmps[i] = src;
2000 else
2001 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2002 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2003 mode, mode, align, ssize);
2005 if (BYTES_BIG_ENDIAN && shift)
2006 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2007 tmps[i], 0, OPTAB_WIDEN);
2010 emit_queue ();
2012 /* Copy the extracted pieces into the proper (probable) hard regs. */
2013 for (i = start; i < XVECLEN (dst, 0); i++)
2014 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2017 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2018 registers represented by a PARALLEL. SSIZE represents the total size of
2019 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
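/* A minimal hypothetical sketch: with SRC built as a PARALLEL of
   (register, byte offset) pairs in the same shape as for emit_group_load
   above, storing it back into a 16-byte, 64-bit aligned block is simply

     emit_group_store (dst_mem, src_parallel, 16, 64);  */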
2021 void
2022 emit_group_store (orig_dst, src, ssize, align)
2023 rtx orig_dst, src;
2024 int ssize;
2025 unsigned int align;
2027 rtx *tmps, dst;
2028 int start, i;
2030 if (GET_CODE (src) != PARALLEL)
2031 abort ();
2033 /* Check for a NULL entry, used to indicate that the parameter goes
2034 both on the stack and in registers. */
2035 if (XEXP (XVECEXP (src, 0, 0), 0))
2036 start = 0;
2037 else
2038 start = 1;
2040 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2042 /* Copy the (probable) hard regs into pseudos. */
2043 for (i = start; i < XVECLEN (src, 0); i++)
2045 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2046 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2047 emit_move_insn (tmps[i], reg);
2049 emit_queue ();
2051 /* If we won't be storing directly into memory, protect the real destination
2052 from strange tricks we might play. */
2053 dst = orig_dst;
2054 if (GET_CODE (dst) == PARALLEL)
2056 rtx temp;
2058 /* We can get a PARALLEL dst if there is a conditional expression in
2059 a return statement. In that case, the dst and src are the same,
2060 so no action is necessary. */
2061 if (rtx_equal_p (dst, src))
2062 return;
2064 /* It is unclear if we can ever reach here, but we may as well handle
2065 it. Allocate a temporary, and split this into a store/load to/from
2066 the temporary. */
2068 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2069 emit_group_store (temp, src, ssize, align);
2070 emit_group_load (dst, temp, ssize, align);
2071 return;
2073 else if (GET_CODE (dst) != MEM)
2075 dst = gen_reg_rtx (GET_MODE (orig_dst));
2076 /* Make life a bit easier for combine. */
2077 emit_move_insn (dst, const0_rtx);
2080 /* Process the pieces. */
2081 for (i = start; i < XVECLEN (src, 0); i++)
2083 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2084 enum machine_mode mode = GET_MODE (tmps[i]);
2085 unsigned int bytelen = GET_MODE_SIZE (mode);
2087 /* Handle trailing fragments that run over the size of the struct. */
2088 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2090 if (BYTES_BIG_ENDIAN)
2092 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2093 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2094 tmps[i], 0, OPTAB_WIDEN);
2096 bytelen = ssize - bytepos;
2099 /* Optimize the access just a bit. */
2100 if (GET_CODE (dst) == MEM
2101 && align >= GET_MODE_ALIGNMENT (mode)
2102 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2103 && bytelen == GET_MODE_SIZE (mode))
2104 emit_move_insn (change_address (dst, mode,
2105 plus_constant (XEXP (dst, 0),
2106 bytepos)),
2107 tmps[i]);
2108 else
2109 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2110 mode, tmps[i], align, ssize);
2113 emit_queue ();
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (GET_CODE (dst) == REG)
2117 emit_move_insn (orig_dst, dst);
2120 /* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2124 The primary purpose of this routine is to handle functions
2125 that return BLKmode structures in registers. Some machines
2126 (the PA for example) want to return all small structures
2127 in registers regardless of the structure's alignment. */
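/* A hypothetical sketch of a caller: pulling a small BLKmode return value
   out of its return register into a fresh stack temporary (the register
   number and mode are made up for illustration):

     rtx blk = copy_blkmode_from_reg (NULL_RTX,
                                      gen_rtx_REG (DImode, 28), type);

   Passing NULL_RTX for TGTBLK, as here, makes the routine allocate the
   temporary itself and return it.  */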
2130 copy_blkmode_from_reg (tgtblk, srcreg, type)
2131 rtx tgtblk;
2132 rtx srcreg;
2133 tree type;
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2140 if (tgtblk == 0)
2142 tgtblk = assign_temp (build_qualified_type (type,
2143 (TYPE_QUALS (type)
2144 | TYPE_QUAL_CONST)),
2145 0, 1, 1);
2146 preserve_temp_slots (tgtblk);
2149 /* This code assumes srcreg is at least a full word. If it isn't,
2150 copy it into a new pseudo which is a full word. */
2151 if (GET_MODE (srcreg) != BLKmode
2152 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2153 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2155 /* Structures whose size is not a multiple of a word are aligned
2156 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2157 machine, this means we must skip the empty high order bytes when
2158 calculating the bit offset. */
2159 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2160 big_endian_correction
2161 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2163 /* Copy the structure BITSIZE bits at a time.
2165 We could probably emit more efficient code for machines which do not use
2166 strict alignment, but it doesn't seem worth the effort at the current
2167 time. */
2168 for (bitpos = 0, xbitpos = big_endian_correction;
2169 bitpos < bytes * BITS_PER_UNIT;
2170 bitpos += bitsize, xbitpos += bitsize)
2172 /* We need a new source operand each time xbitpos is on a
2173 word boundary and when xbitpos == big_endian_correction
2174 (the first time through). */
2175 if (xbitpos % BITS_PER_WORD == 0
2176 || xbitpos == big_endian_correction)
2177 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2179 /* We need a new destination operand each time bitpos is on
2180 a word boundary. */
2181 if (bitpos % BITS_PER_WORD == 0)
2182 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2184 /* Use xbitpos for the source extraction (right justified) and
2185 bitpos for the destination store (left justified). */
2186 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2187 extract_bit_field (src, bitsize,
2188 xbitpos % BITS_PER_WORD, 1,
2189 NULL_RTX, word_mode, word_mode,
2190 bitsize, BITS_PER_WORD),
2191 bitsize, BITS_PER_WORD);
2194 return tgtblk;
2197 /* Add a USE expression for REG to the (possibly empty) list pointed
2198 to by CALL_FUSAGE. REG must denote a hard register. */
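/* For instance, a call expander that passes an argument in hard register 5
   (a hypothetical register number) would record that with

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 5));

   so the register is later attached to the CALL_INSN's usage list.  */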
2200 void
2201 use_reg (call_fusage, reg)
2202 rtx *call_fusage, reg;
2204 if (GET_CODE (reg) != REG
2205 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2206 abort ();
2208 *call_fusage
2209 = gen_rtx_EXPR_LIST (VOIDmode,
2210 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2213 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2214 starting at REGNO. All of these registers must be hard registers. */
2216 void
2217 use_regs (call_fusage, regno, nregs)
2218 rtx *call_fusage;
2219 int regno;
2220 int nregs;
2222 int i;
2224 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2225 abort ();
2227 for (i = 0; i < nregs; i++)
2228 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2231 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2232 PARALLEL REGS. This is for calls that pass values in multiple
2233 non-contiguous locations. The Irix 6 ABI has examples of this. */
2235 void
2236 use_group_regs (call_fusage, regs)
2237 rtx *call_fusage;
2238 rtx regs;
2240 int i;
2242 for (i = 0; i < XVECLEN (regs, 0); i++)
2244 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2246 /* A NULL entry means the parameter goes both on the stack and in
2247 registers. This can also be a MEM for targets that pass values
2248 partially on the stack and partially in registers. */
2249 if (reg != 0 && GET_CODE (reg) == REG)
2250 use_reg (call_fusage, reg);
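/* Determine whether the LEN bytes generated by CONSTFUN can be stored to
   memory using several move instructions.  CONSTFUNDATA is a pointer which
   will be passed as argument in every CONSTFUN call; ALIGN is the maximum
   alignment we can assume.  Return nonzero if it is safe to call
   store_by_pieces with these same arguments.  */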
2256 can_store_by_pieces (len, constfun, constfundata, align)
2257 unsigned HOST_WIDE_INT len;
2258 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2259 PTR constfundata;
2260 unsigned int align;
2262 unsigned HOST_WIDE_INT max_size, l;
2263 HOST_WIDE_INT offset = 0;
2264 enum machine_mode mode, tmode;
2265 enum insn_code icode;
2266 int reverse;
2267 rtx cst;
2269 if (! MOVE_BY_PIECES_P (len, align))
2270 return 0;
2272 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2273 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2274 align = MOVE_MAX * BITS_PER_UNIT;
2276 /* We would first store what we can in the largest integer mode, then go to
2277 successively smaller modes. */
2279 for (reverse = 0;
2280 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2281 reverse++)
2283 l = len;
2284 mode = VOIDmode;
2285 max_size = MOVE_MAX_PIECES + 1;
2286 while (max_size > 1)
2288 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2289 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2290 if (GET_MODE_SIZE (tmode) < max_size)
2291 mode = tmode;
2293 if (mode == VOIDmode)
2294 break;
2296 icode = mov_optab->handlers[(int) mode].insn_code;
2297 if (icode != CODE_FOR_nothing
2298 && align >= GET_MODE_ALIGNMENT (mode))
2300 unsigned int size = GET_MODE_SIZE (mode);
2302 while (l >= size)
2304 if (reverse)
2305 offset -= size;
2307 cst = (*constfun) (constfundata, offset, mode);
2308 if (!LEGITIMATE_CONSTANT_P (cst))
2309 return 0;
2311 if (!reverse)
2312 offset += size;
2314 l -= size;
2318 max_size = GET_MODE_SIZE (mode);
2321 /* The code above should have handled everything. */
2322 if (l != 0)
2323 abort ();
2326 return 1;
2329 /* Generate several move instructions to store LEN bytes generated by
2330 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2331 pointer which will be passed as argument in every CONSTFUN call.
2332 ALIGN is maximum alignment we can assume. */
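/* A hypothetical usage sketch, with an invented callback name: a caller
   expanding a fill of a known repeated byte C might pair the two entry
   points like this

     if (can_store_by_pieces (len, read_fill_byte, &c, align))
       store_by_pieces (dest_mem, len, read_fill_byte, &c, align);

   where read_fill_byte has the CONSTFUN signature above and returns the
   repeated byte as a constant of MODE.  The guard matters, because
   store_by_pieces aborts when MOVE_BY_PIECES_P does not hold.  */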
2334 void
2335 store_by_pieces (to, len, constfun, constfundata, align)
2336 rtx to;
2337 unsigned HOST_WIDE_INT len;
2338 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2339 PTR constfundata;
2340 unsigned int align;
2342 struct store_by_pieces data;
2344 if (! MOVE_BY_PIECES_P (len, align))
2345 abort ();
2346 to = protect_from_queue (to, 1);
2347 data.constfun = constfun;
2348 data.constfundata = constfundata;
2349 data.len = len;
2350 data.to = to;
2351 store_by_pieces_1 (&data, align);
2354 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2355 rtx with BLKmode). The caller must pass TO through protect_from_queue
2356 before calling. ALIGN is maximum alignment we can assume. */
2358 static void
2359 clear_by_pieces (to, len, align)
2360 rtx to;
2361 unsigned HOST_WIDE_INT len;
2362 unsigned int align;
2364 struct store_by_pieces data;
2366 data.constfun = clear_by_pieces_1;
2367 data.constfundata = NULL_PTR;
2368 data.len = len;
2369 data.to = to;
2370 store_by_pieces_1 (&data, align);
2373 /* Callback routine for clear_by_pieces.
2374 Return const0_rtx unconditionally. */
2376 static rtx
2377 clear_by_pieces_1 (data, offset, mode)
2378 PTR data ATTRIBUTE_UNUSED;
2379 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2380 enum machine_mode mode ATTRIBUTE_UNUSED;
2382 return const0_rtx;
2385 /* Subroutine of clear_by_pieces and store_by_pieces.
2386 Generate several move instructions to store LEN bytes of block TO. (A MEM
2387 rtx with BLKmode). The caller must pass TO through protect_from_queue
2388 before calling. ALIGN is maximum alignment we can assume. */
2390 static void
2391 store_by_pieces_1 (data, align)
2392 struct store_by_pieces *data;
2393 unsigned int align;
2395 rtx to_addr = XEXP (data->to, 0);
2396 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2397 enum machine_mode mode = VOIDmode, tmode;
2398 enum insn_code icode;
2400 data->offset = 0;
2401 data->to_addr = to_addr;
2402 data->autinc_to
2403 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2404 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2406 data->explicit_inc_to = 0;
2407 data->reverse
2408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2409 if (data->reverse)
2410 data->offset = data->len;
2412 /* If storing requires more than two move insns,
2413 copy addresses to registers (to make displacements shorter)
2414 and use post-increment if available. */
2415 if (!data->autinc_to
2416 && move_by_pieces_ninsns (data->len, align) > 2)
2418 /* Determine the main mode we'll be using. */
2419 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2420 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2421 if (GET_MODE_SIZE (tmode) < max_size)
2422 mode = tmode;
2424 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2426 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2427 data->autinc_to = 1;
2428 data->explicit_inc_to = -1;
2431 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2432 && ! data->autinc_to)
2434 data->to_addr = copy_addr_to_reg (to_addr);
2435 data->autinc_to = 1;
2436 data->explicit_inc_to = 1;
2439 if ( !data->autinc_to && CONSTANT_P (to_addr))
2440 data->to_addr = copy_addr_to_reg (to_addr);
2443 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2444 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2445 align = MOVE_MAX * BITS_PER_UNIT;
2447 /* First store what we can in the largest integer mode, then go to
2448 successively smaller modes. */
2450 while (max_size > 1)
2452 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2453 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2454 if (GET_MODE_SIZE (tmode) < max_size)
2455 mode = tmode;
2457 if (mode == VOIDmode)
2458 break;
2460 icode = mov_optab->handlers[(int) mode].insn_code;
2461 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2462 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2464 max_size = GET_MODE_SIZE (mode);
2467 /* The code above should have handled everything. */
2468 if (data->len != 0)
2469 abort ();
2472 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2473 with move instructions for mode MODE. GENFUN is the gen_... function
2474 to make a move insn for that mode. DATA has all the other info. */
2476 static void
2477 store_by_pieces_2 (genfun, mode, data)
2478 rtx (*genfun) PARAMS ((rtx, ...));
2479 enum machine_mode mode;
2480 struct store_by_pieces *data;
2482 unsigned int size = GET_MODE_SIZE (mode);
2483 rtx to1, cst;
2485 while (data->len >= size)
2487 if (data->reverse)
2488 data->offset -= size;
2490 if (data->autinc_to)
2492 to1 = gen_rtx_MEM (mode, data->to_addr);
2493 MEM_COPY_ATTRIBUTES (to1, data->to);
2495 else
2496 to1 = change_address (data->to, mode,
2497 plus_constant (data->to_addr, data->offset));
2499 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2500 emit_insn (gen_add2_insn (data->to_addr,
2501 GEN_INT (-(HOST_WIDE_INT) size)));
2503 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2504 emit_insn ((*genfun) (to1, cst));
2506 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2507 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2509 if (! data->reverse)
2510 data->offset += size;
2512 data->len -= size;
2516 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2517 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2519 If we call a function that returns the length of the block, return it. */
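/* An illustrative sketch (the size and alignment are hypothetical): zeroing
   a 16-byte BLKmode temporary with known 64-bit alignment is

     clear_storage (temp_mem, GEN_INT (16), 64);

   For a non-BLKmode object whose size matches its mode this degenerates
   into a single move of CONST0_RTX.  */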
2522 clear_storage (object, size, align)
2523 rtx object;
2524 rtx size;
2525 unsigned int align;
2527 #ifdef TARGET_MEM_FUNCTIONS
2528 static tree fn;
2529 tree call_expr, arg_list;
2530 #endif
2531 rtx retval = 0;
2533 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2534 just move a zero. Otherwise, do this a piece at a time. */
2535 if (GET_MODE (object) != BLKmode
2536 && GET_CODE (size) == CONST_INT
2537 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2538 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2539 else
2541 object = protect_from_queue (object, 1);
2542 size = protect_from_queue (size, 0);
2544 if (GET_CODE (size) == CONST_INT
2545 && MOVE_BY_PIECES_P (INTVAL (size), align))
2546 clear_by_pieces (object, INTVAL (size), align);
2547 else
2549 /* Try the most limited insn first, because there's no point
2550 including more than one in the machine description unless
2551 the more limited one has some advantage. */
2553 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2554 enum machine_mode mode;
2556 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2557 mode = GET_MODE_WIDER_MODE (mode))
2559 enum insn_code code = clrstr_optab[(int) mode];
2560 insn_operand_predicate_fn pred;
2562 if (code != CODE_FOR_nothing
2563 /* We don't need MODE to be narrower than
2564 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2565 the mode mask, as it is returned by the macro, it will
2566 definitely be less than the actual mode mask. */
2567 && ((GET_CODE (size) == CONST_INT
2568 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2569 <= (GET_MODE_MASK (mode) >> 1)))
2570 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2571 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2572 || (*pred) (object, BLKmode))
2573 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2574 || (*pred) (opalign, VOIDmode)))
2576 rtx op1;
2577 rtx last = get_last_insn ();
2578 rtx pat;
2580 op1 = convert_to_mode (mode, size, 1);
2581 pred = insn_data[(int) code].operand[1].predicate;
2582 if (pred != 0 && ! (*pred) (op1, mode))
2583 op1 = copy_to_mode_reg (mode, op1);
2585 pat = GEN_FCN ((int) code) (object, op1, opalign);
2586 if (pat)
2588 emit_insn (pat);
2589 return 0;
2591 else
2592 delete_insns_since (last);
2596 /* OBJECT or SIZE may have been passed through protect_from_queue.
2598 It is unsafe to save the value generated by protect_from_queue
2599 and reuse it later. Consider what happens if emit_queue is
2600 called before the return value from protect_from_queue is used.
2602 Expansion of the CALL_EXPR below will call emit_queue before
2603 we are finished emitting RTL for argument setup. So if we are
2604 not careful we could get the wrong value for an argument.
2606 To avoid this problem we go ahead and emit code to copy OBJECT
2607 and SIZE into new pseudos. We can then place those new pseudos
2608 into an RTL_EXPR and use them later, even after a call to
2609 emit_queue.
2611 Note this is not strictly needed for library calls since they
2612 do not call emit_queue before loading their arguments. However,
2613 we may need to have library calls call emit_queue in the future
2614 since failing to do so could cause problems for targets which
2615 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2616 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2618 #ifdef TARGET_MEM_FUNCTIONS
2619 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2620 #else
2621 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2622 TREE_UNSIGNED (integer_type_node));
2623 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2624 #endif
2626 #ifdef TARGET_MEM_FUNCTIONS
2627 /* It is incorrect to use the libcall calling conventions to call
2628 memset in this context.
2630 This could be a user call to memset and the user may wish to
2631 examine the return value from memset.
2633 For targets where libcalls and normal calls have different
2634 conventions for returning pointers, we could end up generating
2635 incorrect code.
2637 So instead of using a libcall sequence we build up a suitable
2638 CALL_EXPR and expand the call in the normal fashion. */
2639 if (fn == NULL_TREE)
2641 tree fntype;
2643 /* This was copied from except.c, I don't know if all this is
2644 necessary in this context or not. */
2645 fn = get_identifier ("memset");
2646 fntype = build_pointer_type (void_type_node);
2647 fntype = build_function_type (fntype, NULL_TREE);
2648 fn = build_decl (FUNCTION_DECL, fn, fntype);
2649 ggc_add_tree_root (&fn, 1);
2650 DECL_EXTERNAL (fn) = 1;
2651 TREE_PUBLIC (fn) = 1;
2652 DECL_ARTIFICIAL (fn) = 1;
2653 make_decl_rtl (fn, NULL_PTR);
2654 assemble_external (fn);
2657 /* We need to make an argument list for the function call.
2659 memset has three arguments: the first is a void * address, the
2660 second an integer with the initialization value, and the last is a
2661 size_t count of the bytes to set.
2662 arg_list
2663 = build_tree_list (NULL_TREE,
2664 make_tree (build_pointer_type (void_type_node),
2665 object));
2666 TREE_CHAIN (arg_list)
2667 = build_tree_list (NULL_TREE,
2668 make_tree (integer_type_node, const0_rtx));
2669 TREE_CHAIN (TREE_CHAIN (arg_list))
2670 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2671 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2673 /* Now we have to build up the CALL_EXPR itself. */
2674 call_expr = build1 (ADDR_EXPR,
2675 build_pointer_type (TREE_TYPE (fn)), fn);
2676 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2677 call_expr, arg_list, NULL_TREE);
2678 TREE_SIDE_EFFECTS (call_expr) = 1;
2680 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2681 #else
2682 emit_library_call (bzero_libfunc, LCT_NORMAL,
2683 VOIDmode, 2, object, Pmode, size,
2684 TYPE_MODE (integer_type_node));
2685 #endif
2689 return retval;
2692 /* Generate code to copy Y into X.
2693 Both Y and X must have the same mode, except that
2694 Y can be a constant with VOIDmode.
2695 This mode cannot be BLKmode; use emit_block_move for that.
2697 Return the last instruction emitted. */
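/* A minimal illustrative use (the pseudo and the constant are hypothetical):

     rtx reg = gen_reg_rtx (SImode);
     rtx insn = emit_move_insn (reg, GEN_INT (42));

   Constants that fail LEGITIMATE_CONSTANT_P are first forced into the
   constant pool, and when the destination is a register a REG_EQUAL note
   recording the original constant is attached to the move.  */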
2700 emit_move_insn (x, y)
2701 rtx x, y;
2703 enum machine_mode mode = GET_MODE (x);
2704 rtx y_cst = NULL_RTX;
2705 rtx last_insn;
2707 x = protect_from_queue (x, 1);
2708 y = protect_from_queue (y, 0);
2710 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2711 abort ();
2713 /* Never force constant_p_rtx to memory. */
2714 if (GET_CODE (y) == CONSTANT_P_RTX)
2716 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2718 y_cst = y;
2719 y = force_const_mem (mode, y);
2722 /* If X or Y are memory references, verify that their addresses are valid
2723 for the machine. */
2724 if (GET_CODE (x) == MEM
2725 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2726 && ! push_operand (x, GET_MODE (x)))
2727 || (flag_force_addr
2728 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2729 x = change_address (x, VOIDmode, XEXP (x, 0));
2731 if (GET_CODE (y) == MEM
2732 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2733 || (flag_force_addr
2734 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2735 y = change_address (y, VOIDmode, XEXP (y, 0));
2737 if (mode == BLKmode)
2738 abort ();
2740 last_insn = emit_move_insn_1 (x, y);
2742 if (y_cst && GET_CODE (x) == REG)
2743 REG_NOTES (last_insn)
2744 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2746 return last_insn;
2749 /* Low level part of emit_move_insn.
2750 Called just like emit_move_insn, but assumes X and Y
2751 are basically valid. */
2754 emit_move_insn_1 (x, y)
2755 rtx x, y;
2757 enum machine_mode mode = GET_MODE (x);
2758 enum machine_mode submode;
2759 enum mode_class class = GET_MODE_CLASS (mode);
2760 unsigned int i;
2762 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2763 abort ();
2765 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2766 return
2767 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2769 /* Expand complex moves by moving real part and imag part, if possible. */
2770 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2771 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2772 * BITS_PER_UNIT),
2773 (class == MODE_COMPLEX_INT
2774 ? MODE_INT : MODE_FLOAT),
2776 && (mov_optab->handlers[(int) submode].insn_code
2777 != CODE_FOR_nothing))
2779 /* Don't split destination if it is a stack push. */
2780 int stack = push_operand (x, GET_MODE (x));
2782 /* If this is a stack push, push the highpart first, so it
2783 will be in the argument order.
2785 In that case, change_address is used only to convert
2786 the mode, not to change the address. */
2787 if (stack)
2789 /* Note that the real part always precedes the imag part in memory
2790 regardless of the machine's endianness. */
2791 #ifdef STACK_GROWS_DOWNWARD
2792 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2793 (gen_rtx_MEM (submode, XEXP (x, 0)),
2794 gen_imagpart (submode, y)));
2795 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2796 (gen_rtx_MEM (submode, XEXP (x, 0)),
2797 gen_realpart (submode, y)));
2798 #else
2799 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2800 (gen_rtx_MEM (submode, XEXP (x, 0)),
2801 gen_realpart (submode, y)));
2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2803 (gen_rtx_MEM (submode, XEXP (x, 0)),
2804 gen_imagpart (submode, y)));
2805 #endif
2807 else
2809 rtx realpart_x, realpart_y;
2810 rtx imagpart_x, imagpart_y;
2812 /* If this is a complex value with each part being smaller than a
2813 word, the usual calling sequence will likely pack the pieces into
2814 a single register. Unfortunately, SUBREG of hard registers only
2815 deals in terms of words, so we have a problem converting input
2816 arguments to the CONCAT of two registers that is used elsewhere
2817 for complex values. If this is before reload, we can copy it into
2818 memory and reload. FIXME, we should see about using extract and
2819 insert on integer registers, but complex short and complex char
2820 variables should be rarely used. */
2821 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2822 && (reload_in_progress | reload_completed) == 0)
2824 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2825 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2827 if (packed_dest_p || packed_src_p)
2829 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2830 ? MODE_FLOAT : MODE_INT);
2832 enum machine_mode reg_mode
2833 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2835 if (reg_mode != BLKmode)
2837 rtx mem = assign_stack_temp (reg_mode,
2838 GET_MODE_SIZE (mode), 0);
2839 rtx cmem = change_address (mem, mode, NULL_RTX);
2841 cfun->cannot_inline
2842 = N_("function using short complex types cannot be inline");
2844 if (packed_dest_p)
2846 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2847 emit_move_insn_1 (cmem, y);
2848 return emit_move_insn_1 (sreg, mem);
2850 else
2852 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2853 emit_move_insn_1 (mem, sreg);
2854 return emit_move_insn_1 (x, cmem);
2860 realpart_x = gen_realpart (submode, x);
2861 realpart_y = gen_realpart (submode, y);
2862 imagpart_x = gen_imagpart (submode, x);
2863 imagpart_y = gen_imagpart (submode, y);
2865 /* Show the output dies here. This is necessary for SUBREGs
2866 of pseudos since we cannot track their lifetimes correctly;
2867 hard regs shouldn't appear here except as return values.
2868 We never want to emit such a clobber after reload. */
2869 if (x != y
2870 && ! (reload_in_progress || reload_completed)
2871 && (GET_CODE (realpart_x) == SUBREG
2872 || GET_CODE (imagpart_x) == SUBREG))
2874 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2878 (realpart_x, realpart_y));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (imagpart_x, imagpart_y));
2883 return get_last_insn ();
2886 /* This will handle any multi-word mode that lacks a move_insn pattern.
2887 However, you will get better code if you define such patterns,
2888 even if they must turn into multiple assembler instructions. */
2889 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2891 rtx last_insn = 0;
2892 rtx seq, inner;
2893 int need_clobber;
2895 #ifdef PUSH_ROUNDING
2897 /* If X is a push on the stack, do the push now and replace
2898 X with a reference to the stack pointer. */
2899 if (push_operand (x, GET_MODE (x)))
2901 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2902 x = change_address (x, VOIDmode, stack_pointer_rtx);
2904 #endif
2906 /* If we are in reload, see if either operand is a MEM whose address
2907 is scheduled for replacement. */
2908 if (reload_in_progress && GET_CODE (x) == MEM
2909 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2911 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2913 MEM_COPY_ATTRIBUTES (new, x);
2914 x = new;
2916 if (reload_in_progress && GET_CODE (y) == MEM
2917 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2919 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2921 MEM_COPY_ATTRIBUTES (new, y);
2922 y = new;
2925 start_sequence ();
2927 need_clobber = 0;
2928 for (i = 0;
2929 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2930 i++)
2932 rtx xpart = operand_subword (x, i, 1, mode);
2933 rtx ypart = operand_subword (y, i, 1, mode);
2935 /* If we can't get a part of Y, put Y into memory if it is a
2936 constant. Otherwise, force it into a register. If we still
2937 can't get a part of Y, abort. */
2938 if (ypart == 0 && CONSTANT_P (y))
2940 y = force_const_mem (mode, y);
2941 ypart = operand_subword (y, i, 1, mode);
2943 else if (ypart == 0)
2944 ypart = operand_subword_force (y, i, mode);
2946 if (xpart == 0 || ypart == 0)
2947 abort ();
2949 need_clobber |= (GET_CODE (xpart) == SUBREG);
2951 last_insn = emit_move_insn (xpart, ypart);
2954 seq = gen_sequence ();
2955 end_sequence ();
2957 /* Show the output dies here. This is necessary for SUBREGs
2958 of pseudos since we cannot track their lifetimes correctly;
2959 hard regs shouldn't appear here except as return values.
2960 We never want to emit such a clobber after reload. */
2961 if (x != y
2962 && ! (reload_in_progress || reload_completed)
2963 && need_clobber != 0)
2965 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2968 emit_insn (seq);
2970 return last_insn;
2972 else
2973 abort ();
2976 /* Pushing data onto the stack. */
2978 /* Push a block of length SIZE (perhaps variable)
2979 and return an rtx to address the beginning of the block.
2980 Note that it is not possible for the value returned to be a QUEUED.
2981 The value may be virtual_outgoing_args_rtx.
2983 EXTRA is the number of bytes of padding to push in addition to SIZE.
2984 BELOW nonzero means this padding comes at low addresses;
2985 otherwise, the padding comes at high addresses. */
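/* A hypothetical call: making room for a 32-byte block with 4 bytes of
   padding at the low-address end would be

     rtx addr = push_block (GEN_INT (32), 4, 1);

   and the returned address can then be wrapped in a BLKmode MEM covering
   the new space.  */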
2988 push_block (size, extra, below)
2989 rtx size;
2990 int extra, below;
2992 register rtx temp;
2994 size = convert_modes (Pmode, ptr_mode, size, 1);
2995 if (CONSTANT_P (size))
2996 anti_adjust_stack (plus_constant (size, extra));
2997 else if (GET_CODE (size) == REG && extra == 0)
2998 anti_adjust_stack (size);
2999 else
3001 temp = copy_to_mode_reg (Pmode, size);
3002 if (extra != 0)
3003 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3004 temp, 0, OPTAB_LIB_WIDEN);
3005 anti_adjust_stack (temp);
3008 #ifndef STACK_GROWS_DOWNWARD
3009 #ifdef ARGS_GROW_DOWNWARD
3010 if (!ACCUMULATE_OUTGOING_ARGS)
3011 #else
3012 if (0)
3013 #endif
3014 #else
3015 if (1)
3016 #endif
3018 /* Return the lowest stack address when STACK or ARGS grow downward and
3019 we are not accumulating outgoing arguments (the c4x port uses such
3020 conventions). */
3021 temp = virtual_outgoing_args_rtx;
3022 if (extra != 0 && below)
3023 temp = plus_constant (temp, extra);
3025 else
3027 if (GET_CODE (size) == CONST_INT)
3028 temp = plus_constant (virtual_outgoing_args_rtx,
3029 -INTVAL (size) - (below ? 0 : extra));
3030 else if (extra != 0 && !below)
3031 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3032 negate_rtx (Pmode, plus_constant (size, extra)));
3033 else
3034 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3035 negate_rtx (Pmode, size));
3038 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3042 gen_push_operand ()
3044 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3047 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3048 block of SIZE bytes. */
3050 static rtx
3051 get_push_address (size)
3052 int size;
3054 register rtx temp;
3056 if (STACK_PUSH_CODE == POST_DEC)
3057 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3058 else if (STACK_PUSH_CODE == POST_INC)
3059 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3060 else
3061 temp = stack_pointer_rtx;
3063 return copy_to_reg (temp);
3066 /* Generate code to push X onto the stack, assuming it has mode MODE and
3067 type TYPE.
3068 MODE is redundant except when X is a CONST_INT (since they don't
3069 carry mode info).
3070 SIZE is an rtx for the size of data to be copied (in bytes),
3071 needed only if X is BLKmode.
3073 ALIGN (in bits) is maximum alignment we can assume.
3075 If PARTIAL and REG are both nonzero, then copy that many of the first
3076 words of X into registers starting with REG, and push the rest of X.
3077 The amount of space pushed is decreased by PARTIAL words,
3078 rounded *down* to a multiple of PARM_BOUNDARY.
3079 REG must be a hard register in this case.
3080 If REG is zero but PARTIAL is not, take all other actions for an
3081 argument partially in registers, but do not actually load any
3082 registers.
3084 EXTRA is the amount in bytes of extra space to leave next to this arg.
3085 This is ignored if an argument block has already been allocated.
3087 On a machine that lacks real push insns, ARGS_ADDR is the address of
3088 the bottom of the argument block for this call. We use indexing off there
3089 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3090 argument block has not been preallocated.
3092 ARGS_SO_FAR is the size of args previously pushed for this call.
3094 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3095 for arguments passed in registers. If nonzero, it will be the number
3096 of bytes required. */
3098 void
3099 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3100 args_addr, args_so_far, reg_parm_stack_space,
3101 alignment_pad)
3102 register rtx x;
3103 enum machine_mode mode;
3104 tree type;
3105 rtx size;
3106 unsigned int align;
3107 int partial;
3108 rtx reg;
3109 int extra;
3110 rtx args_addr;
3111 rtx args_so_far;
3112 int reg_parm_stack_space;
3113 rtx alignment_pad;
3115 rtx xinner;
3116 enum direction stack_direction
3117 #ifdef STACK_GROWS_DOWNWARD
3118 = downward;
3119 #else
3120 = upward;
3121 #endif
3123 /* Decide where to pad the argument: `downward' for below,
3124 `upward' for above, or `none' for don't pad it.
3125 Default is below for small data on big-endian machines; else above. */
3126 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3128 /* Invert direction if stack is post-update. */
3129 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3130 if (where_pad != none)
3131 where_pad = (where_pad == downward ? upward : downward);
3133 xinner = x = protect_from_queue (x, 0);
3135 if (mode == BLKmode)
3137 /* Copy a block into the stack, entirely or partially. */
3139 register rtx temp;
3140 int used = partial * UNITS_PER_WORD;
3141 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3142 int skip;
3144 if (size == 0)
3145 abort ();
3147 used -= offset;
3149 /* USED is now the # of bytes we need not copy to the stack
3150 because registers will take care of them. */
3152 if (partial != 0)
3153 xinner = change_address (xinner, BLKmode,
3154 plus_constant (XEXP (xinner, 0), used));
3156 /* If the partial register-part of the arg counts in its stack size,
3157 skip the part of stack space corresponding to the registers.
3158 Otherwise, start copying to the beginning of the stack space,
3159 by setting SKIP to 0. */
3160 skip = (reg_parm_stack_space == 0) ? 0 : used;
3162 #ifdef PUSH_ROUNDING
3163 /* Do it with several push insns if that doesn't take lots of insns
3164 and if there is no difficulty with push insns that skip bytes
3165 on the stack for alignment purposes. */
3166 if (args_addr == 0
3167 && PUSH_ARGS
3168 && GET_CODE (size) == CONST_INT
3169 && skip == 0
3170 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3171 /* Here we avoid the case of a structure whose weak alignment
3172 forces many pushes of a small amount of data,
3173 and such small pushes do rounding that causes trouble. */
3174 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3175 || align >= BIGGEST_ALIGNMENT
3176 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3177 == (align / BITS_PER_UNIT)))
3178 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3180 /* Push padding now if padding above and stack grows down,
3181 or if padding below and stack grows up.
3182 But if space already allocated, this has already been done. */
3183 if (extra && args_addr == 0
3184 && where_pad != none && where_pad != stack_direction)
3185 anti_adjust_stack (GEN_INT (extra));
3187 stack_pointer_delta += INTVAL (size) - used;
3188 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3189 INTVAL (size) - used, align);
3191 if (current_function_check_memory_usage && ! in_check_memory_usage)
3193 rtx temp;
3195 in_check_memory_usage = 1;
3196 temp = get_push_address (INTVAL (size) - used);
3197 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3198 emit_library_call (chkr_copy_bitmap_libfunc,
3199 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3200 Pmode, XEXP (xinner, 0), Pmode,
3201 GEN_INT (INTVAL (size) - used),
3202 TYPE_MODE (sizetype));
3203 else
3204 emit_library_call (chkr_set_right_libfunc,
3205 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3206 Pmode, GEN_INT (INTVAL (size) - used),
3207 TYPE_MODE (sizetype),
3208 GEN_INT (MEMORY_USE_RW),
3209 TYPE_MODE (integer_type_node));
3210 in_check_memory_usage = 0;
3213 else
3214 #endif /* PUSH_ROUNDING */
3216 rtx target;
3218 /* Otherwise make space on the stack and copy the data
3219 to the address of that space. */
3221 /* Deduct words put into registers from the size we must copy. */
3222 if (partial != 0)
3224 if (GET_CODE (size) == CONST_INT)
3225 size = GEN_INT (INTVAL (size) - used);
3226 else
3227 size = expand_binop (GET_MODE (size), sub_optab, size,
3228 GEN_INT (used), NULL_RTX, 0,
3229 OPTAB_LIB_WIDEN);
3232 /* Get the address of the stack space.
3233 In this case, we do not deal with EXTRA separately.
3234 A single stack adjust will do. */
3235 if (! args_addr)
3237 temp = push_block (size, extra, where_pad == downward);
3238 extra = 0;
3240 else if (GET_CODE (args_so_far) == CONST_INT)
3241 temp = memory_address (BLKmode,
3242 plus_constant (args_addr,
3243 skip + INTVAL (args_so_far)));
3244 else
3245 temp = memory_address (BLKmode,
3246 plus_constant (gen_rtx_PLUS (Pmode,
3247 args_addr,
3248 args_so_far),
3249 skip));
3250 if (current_function_check_memory_usage && ! in_check_memory_usage)
3252 in_check_memory_usage = 1;
3253 target = copy_to_reg (temp);
3254 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3255 emit_library_call (chkr_copy_bitmap_libfunc,
3256 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3257 target, Pmode,
3258 XEXP (xinner, 0), Pmode,
3259 size, TYPE_MODE (sizetype));
3260 else
3261 emit_library_call (chkr_set_right_libfunc,
3262 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3263 target, Pmode,
3264 size, TYPE_MODE (sizetype),
3265 GEN_INT (MEMORY_USE_RW),
3266 TYPE_MODE (integer_type_node));
3267 in_check_memory_usage = 0;
3270 target = gen_rtx_MEM (BLKmode, temp);
3272 if (type != 0)
3274 set_mem_attributes (target, type, 1);
3275 /* Function incoming arguments may overlap with sibling call
3276 outgoing arguments and we cannot allow reordering of reads
3277 from function arguments with stores to outgoing arguments
3278 of sibling calls. */
3279 MEM_ALIAS_SET (target) = 0;
3282 /* TEMP is the address of the block. Copy the data there. */
3283 if (GET_CODE (size) == CONST_INT
3284 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3286 move_by_pieces (target, xinner, INTVAL (size), align);
3287 goto ret;
3289 else
3291 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3292 enum machine_mode mode;
3294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3295 mode != VOIDmode;
3296 mode = GET_MODE_WIDER_MODE (mode))
3298 enum insn_code code = movstr_optab[(int) mode];
3299 insn_operand_predicate_fn pred;
3301 if (code != CODE_FOR_nothing
3302 && ((GET_CODE (size) == CONST_INT
3303 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3304 <= (GET_MODE_MASK (mode) >> 1)))
3305 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3306 && (!(pred = insn_data[(int) code].operand[0].predicate)
3307 || ((*pred) (target, BLKmode)))
3308 && (!(pred = insn_data[(int) code].operand[1].predicate)
3309 || ((*pred) (xinner, BLKmode)))
3310 && (!(pred = insn_data[(int) code].operand[3].predicate)
3311 || ((*pred) (opalign, VOIDmode))))
3313 rtx op2 = convert_to_mode (mode, size, 1);
3314 rtx last = get_last_insn ();
3315 rtx pat;
3317 pred = insn_data[(int) code].operand[2].predicate;
3318 if (pred != 0 && ! (*pred) (op2, mode))
3319 op2 = copy_to_mode_reg (mode, op2);
3321 pat = GEN_FCN ((int) code) (target, xinner,
3322 op2, opalign);
3323 if (pat)
3325 emit_insn (pat);
3326 goto ret;
3328 else
3329 delete_insns_since (last);
3334 if (!ACCUMULATE_OUTGOING_ARGS)
3336 /* If the source is referenced relative to the stack pointer,
3337 copy it to another register to stabilize it. We do not need
3338 to do this if we know that we won't be changing sp. */
3340 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3341 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3342 temp = copy_to_reg (temp);
3345 /* Make inhibit_defer_pop nonzero around the library call
3346 to force it to pop the bcopy-arguments right away. */
3347 NO_DEFER_POP;
3348 #ifdef TARGET_MEM_FUNCTIONS
3349 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3350 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3351 convert_to_mode (TYPE_MODE (sizetype),
3352 size, TREE_UNSIGNED (sizetype)),
3353 TYPE_MODE (sizetype));
3354 #else
3355 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3356 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3357 convert_to_mode (TYPE_MODE (integer_type_node),
3358 size,
3359 TREE_UNSIGNED (integer_type_node)),
3360 TYPE_MODE (integer_type_node));
3361 #endif
3362 OK_DEFER_POP;
3365 else if (partial > 0)
3367 /* Scalar partly in registers. */
3369 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3370 int i;
3371 int not_stack;
3372 /* # words of start of argument
3373 that we must make space for but need not store. */
3374 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3375 int args_offset = INTVAL (args_so_far);
3376 int skip;
3378 /* Push padding now if padding above and stack grows down,
3379 or if padding below and stack grows up.
3380 But if space already allocated, this has already been done. */
3381 if (extra && args_addr == 0
3382 && where_pad != none && where_pad != stack_direction)
3383 anti_adjust_stack (GEN_INT (extra));
3385 /* If we make space by pushing it, we might as well push
3386 the real data. Otherwise, we can leave OFFSET nonzero
3387 and leave the space uninitialized. */
3388 if (args_addr == 0)
3389 offset = 0;
3391 /* Now NOT_STACK gets the number of words that we don't need to
3392 allocate on the stack. */
3393 not_stack = partial - offset;
3395 /* If the partial register-part of the arg counts in its stack size,
3396 skip the part of stack space corresponding to the registers.
3397 Otherwise, start copying to the beginning of the stack space,
3398 by setting SKIP to 0. */
3399 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3401 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3402 x = validize_mem (force_const_mem (mode, x));
3404 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3405 SUBREGs of such registers are not allowed. */
3406 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3407 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3408 x = copy_to_reg (x);
3410 /* Loop over all the words allocated on the stack for this arg. */
3411 /* We can do it by words, because any scalar bigger than a word
3412 has a size that is a multiple of a word. */
3413 #ifndef PUSH_ARGS_REVERSED
3414 for (i = not_stack; i < size; i++)
3415 #else
3416 for (i = size - 1; i >= not_stack; i--)
3417 #endif
3418 if (i >= not_stack + offset)
3419 emit_push_insn (operand_subword_force (x, i, mode),
3420 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3421 0, args_addr,
3422 GEN_INT (args_offset + ((i - not_stack + skip)
3423 * UNITS_PER_WORD)),
3424 reg_parm_stack_space, alignment_pad);
3426 else
3428 rtx addr;
3429 rtx target = NULL_RTX;
3430 rtx dest;
3432 /* Push padding now if padding above and stack grows down,
3433 or if padding below and stack grows up.
3434 But if space already allocated, this has already been done. */
3435 if (extra && args_addr == 0
3436 && where_pad != none && where_pad != stack_direction)
3437 anti_adjust_stack (GEN_INT (extra));
3439 #ifdef PUSH_ROUNDING
3440 if (args_addr == 0 && PUSH_ARGS)
3442 addr = gen_push_operand ();
3443 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3445 else
3446 #endif
3448 if (GET_CODE (args_so_far) == CONST_INT)
3449 addr
3450 = memory_address (mode,
3451 plus_constant (args_addr,
3452 INTVAL (args_so_far)));
3453 else
3454 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3455 args_so_far));
3456 target = addr;
3459 dest = gen_rtx_MEM (mode, addr);
3460 if (type != 0)
3462 set_mem_attributes (dest, type, 1);
3463 /* Function incoming arguments may overlap with sibling call
3464 outgoing arguments and we cannot allow reordering of reads
3465 from function arguments with stores to outgoing arguments
3466 of sibling calls. */
3467 MEM_ALIAS_SET (dest) = 0;
3470 emit_move_insn (dest, x);
3472 if (current_function_check_memory_usage && ! in_check_memory_usage)
3474 in_check_memory_usage = 1;
3475 if (target == 0)
3476 target = get_push_address (GET_MODE_SIZE (mode));
3478 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3479 emit_library_call (chkr_copy_bitmap_libfunc,
3480 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3481 Pmode, XEXP (x, 0), Pmode,
3482 GEN_INT (GET_MODE_SIZE (mode)),
3483 TYPE_MODE (sizetype));
3484 else
3485 emit_library_call (chkr_set_right_libfunc,
3486 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3487 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3488 TYPE_MODE (sizetype),
3489 GEN_INT (MEMORY_USE_RW),
3490 TYPE_MODE (integer_type_node));
3491 in_check_memory_usage = 0;
3495 ret:
3496 /* If part should go in registers, copy that part
3497 into the appropriate registers. Do this now, at the end,
3498 since mem-to-mem copies above may do function calls. */
3499 if (partial > 0 && reg != 0)
3501 /* Handle calls that pass values in multiple non-contiguous locations.
3502 The Irix 6 ABI has examples of this. */
3503 if (GET_CODE (reg) == PARALLEL)
3504 emit_group_load (reg, x, -1, align); /* ??? size? */
3505 else
3506 move_block_to_reg (REGNO (reg), x, partial, mode);
3509 if (extra && args_addr == 0 && where_pad == stack_direction)
3510 anti_adjust_stack (GEN_INT (extra));
3512 if (alignment_pad && args_addr == 0)
3513 anti_adjust_stack (alignment_pad);
3516 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3517 operations. */
3519 static rtx
3520 get_subtarget (x)
3521 rtx x;
3523 return ((x == 0
3524 /* Only registers can be subtargets. */
3525 || GET_CODE (x) != REG
3526 /* If the register is readonly, it can't be set more than once. */
3527 || RTX_UNCHANGING_P (x)
3528 /* Don't use hard regs to avoid extending their life. */
3529 || REGNO (x) < FIRST_PSEUDO_REGISTER
3530 /* Avoid subtargets inside loops,
3531 since they hide some invariant expressions. */
3532 || preserve_subexpressions_p ())
3533 ? 0 : x);
3536 /* Expand an assignment that stores the value of FROM into TO.
3537 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3538 (This may contain a QUEUED rtx;
3539 if the value is constant, this rtx is a constant.)
3540 Otherwise, the returned value is NULL_RTX.
3542 SUGGEST_REG is no longer actually used.
3543 It used to mean, copy the value through a register
3544 and return that register, if that is possible.
3545 We now use WANT_VALUE to decide whether to do this. */
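/* A hypothetical front-end style use: to expand `a = b + 1' a front end
   would call

     expand_assignment (lhs_tree, rhs_tree, 0, 0);

   passing WANT_VALUE as 1 instead when the value of the assignment is
   itself used, as in `c = (a = b + 1)'.  */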
3548 expand_assignment (to, from, want_value, suggest_reg)
3549 tree to, from;
3550 int want_value;
3551 int suggest_reg ATTRIBUTE_UNUSED;
3553 register rtx to_rtx = 0;
3554 rtx result;
3556 /* Don't crash if the lhs of the assignment was erroneous. */
3558 if (TREE_CODE (to) == ERROR_MARK)
3560 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3561 return want_value ? result : NULL_RTX;
3564 /* Assignment of a structure component needs special treatment
3565 if the structure component's rtx is not simply a MEM.
3566 Assignment of an array element at a constant index, and assignment of
3567 an array element in an unaligned packed structure field, has the same
3568 problem. */
3570 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3571 || TREE_CODE (to) == ARRAY_REF)
3573 enum machine_mode mode1;
3574 HOST_WIDE_INT bitsize, bitpos;
3575 tree offset;
3576 int unsignedp;
3577 int volatilep = 0;
3578 tree tem;
3579 unsigned int alignment;
3581 push_temp_slots ();
3582 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3583 &unsignedp, &volatilep, &alignment);
3585 /* If we are going to use store_bit_field and extract_bit_field,
3586 make sure to_rtx will be safe for multiple use. */
3588 if (mode1 == VOIDmode && want_value)
3589 tem = stabilize_reference (tem);
3591 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3592 if (offset != 0)
3594 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3596 if (GET_CODE (to_rtx) != MEM)
3597 abort ();
3599 if (GET_MODE (offset_rtx) != ptr_mode)
3601 #ifdef POINTERS_EXTEND_UNSIGNED
3602 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3603 #else
3604 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3605 #endif
3608 /* A constant address in TO_RTX can have VOIDmode, we must not try
3609 to call force_reg for that case. Avoid that case. */
3610 if (GET_CODE (to_rtx) == MEM
3611 && GET_MODE (to_rtx) == BLKmode
3612 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3613 && bitsize
3614 && (bitpos % bitsize) == 0
3615 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3616 && alignment == GET_MODE_ALIGNMENT (mode1))
3618 rtx temp = change_address (to_rtx, mode1,
3619 plus_constant (XEXP (to_rtx, 0),
3620 (bitpos /
3621 BITS_PER_UNIT)));
3622 if (GET_CODE (XEXP (temp, 0)) == REG)
3623 to_rtx = temp;
3624 else
3625 to_rtx = change_address (to_rtx, mode1,
3626 force_reg (GET_MODE (XEXP (temp, 0)),
3627 XEXP (temp, 0)));
3628 bitpos = 0;
3631 to_rtx = change_address (to_rtx, VOIDmode,
3632 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3633 force_reg (ptr_mode,
3634 offset_rtx)));
3637 if (volatilep)
3639 if (GET_CODE (to_rtx) == MEM)
3641 /* When the offset is zero, to_rtx is the address of the
3642 structure we are storing into, and hence may be shared.
3643 We must make a new MEM before setting the volatile bit. */
3644 if (offset == 0)
3645 to_rtx = copy_rtx (to_rtx);
3647 MEM_VOLATILE_P (to_rtx) = 1;
3649 #if 0 /* This was turned off because, when a field is volatile
3650 in an object which is not volatile, the object may be in a register,
3651 and then we would abort over here. */
3652 else
3653 abort ();
3654 #endif
3657 if (TREE_CODE (to) == COMPONENT_REF
3658 && TREE_READONLY (TREE_OPERAND (to, 1)))
3660 if (offset == 0)
3661 to_rtx = copy_rtx (to_rtx);
3663 RTX_UNCHANGING_P (to_rtx) = 1;
3666 /* Check the access. */
3667 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3669 rtx to_addr;
3670 int size;
3671 int best_mode_size;
3672 enum machine_mode best_mode;
3674 best_mode = get_best_mode (bitsize, bitpos,
3675 TYPE_ALIGN (TREE_TYPE (tem)),
3676 mode1, volatilep);
3677 if (best_mode == VOIDmode)
3678 best_mode = QImode;
3680 best_mode_size = GET_MODE_BITSIZE (best_mode);
3681 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3682 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3683 size *= GET_MODE_SIZE (best_mode);
3685 /* Check the access right of the pointer. */
3686 in_check_memory_usage = 1;
3687 if (size)
3688 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3689 VOIDmode, 3, to_addr, Pmode,
3690 GEN_INT (size), TYPE_MODE (sizetype),
3691 GEN_INT (MEMORY_USE_WO),
3692 TYPE_MODE (integer_type_node));
3693 in_check_memory_usage = 0;
3696 /* If this is a varying-length object, we must get the address of
3697 the source and do an explicit block move. */
3698 if (bitsize < 0)
3700 unsigned int from_align;
3701 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3702 rtx inner_to_rtx
3703 = change_address (to_rtx, VOIDmode,
3704 plus_constant (XEXP (to_rtx, 0),
3705 bitpos / BITS_PER_UNIT));
3707 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3708 MIN (alignment, from_align));
3709 free_temp_slots ();
3710 pop_temp_slots ();
3711 return to_rtx;
3713 else
3715 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3716 (want_value
3717 /* Spurious cast for HPUX compiler. */
3718 ? ((enum machine_mode)
3719 TYPE_MODE (TREE_TYPE (to)))
3720 : VOIDmode),
3721 unsignedp,
3722 alignment,
3723 int_size_in_bytes (TREE_TYPE (tem)),
3724 get_alias_set (to));
3726 preserve_temp_slots (result);
3727 free_temp_slots ();
3728 pop_temp_slots ();
3730 /* If the value is meaningful, convert RESULT to the proper mode.
3731 Otherwise, return nothing. */
3732 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3733 TYPE_MODE (TREE_TYPE (from)),
3734 result,
3735 TREE_UNSIGNED (TREE_TYPE (to)))
3736 : NULL_RTX);
3740 /* If the rhs is a function call and its value is not an aggregate,
3741 call the function before we start to compute the lhs.
3742 This is needed for correct code for cases such as
3743 val = setjmp (buf) on machines where reference to val
3744 requires loading up part of an address in a separate insn.
3746 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3747 since it might be a promoted variable where the zero- or sign- extension
3748 needs to be done. Handling this in the normal way is safe because no
3749 computation is done before the call. */
3750 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3751 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3752 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3753 && GET_CODE (DECL_RTL (to)) == REG))
3755 rtx value;
3757 push_temp_slots ();
3758 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3759 if (to_rtx == 0)
3760 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3762 /* Handle calls that return values in multiple non-contiguous locations.
3763 The Irix 6 ABI has examples of this. */
3764 if (GET_CODE (to_rtx) == PARALLEL)
3765 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3766 TYPE_ALIGN (TREE_TYPE (from)));
3767 else if (GET_MODE (to_rtx) == BLKmode)
3768 emit_block_move (to_rtx, value, expr_size (from),
3769 TYPE_ALIGN (TREE_TYPE (from)));
3770 else
3772 #ifdef POINTERS_EXTEND_UNSIGNED
3773 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3774 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3775 value = convert_memory_address (GET_MODE (to_rtx), value);
3776 #endif
3777 emit_move_insn (to_rtx, value);
3779 preserve_temp_slots (to_rtx);
3780 free_temp_slots ();
3781 pop_temp_slots ();
3782 return want_value ? to_rtx : NULL_RTX;
3785 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3786 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3788 if (to_rtx == 0)
3790 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3791 if (GET_CODE (to_rtx) == MEM)
3792 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3795 /* Don't move directly into a return register. */
3796 if (TREE_CODE (to) == RESULT_DECL
3797 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3799 rtx temp;
3801 push_temp_slots ();
3802 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3804 if (GET_CODE (to_rtx) == PARALLEL)
3805 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3806 TYPE_ALIGN (TREE_TYPE (from)));
3807 else
3808 emit_move_insn (to_rtx, temp);
3810 preserve_temp_slots (to_rtx);
3811 free_temp_slots ();
3812 pop_temp_slots ();
3813 return want_value ? to_rtx : NULL_RTX;
3816 /* In case we are returning the contents of an object which overlaps
3817 the place the value is being stored, use a safe function when copying
3818 a value through a pointer into a structure value return block. */
3819 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3820 && current_function_returns_struct
3821 && !current_function_returns_pcc_struct)
3823 rtx from_rtx, size;
3825 push_temp_slots ();
3826 size = expr_size (from);
3827 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3828 EXPAND_MEMORY_USE_DONT);
3830 /* Copy the rights of the bitmap. */
3831 if (current_function_check_memory_usage)
3832 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3833 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3834 XEXP (from_rtx, 0), Pmode,
3835 convert_to_mode (TYPE_MODE (sizetype),
3836 size, TREE_UNSIGNED (sizetype)),
3837 TYPE_MODE (sizetype));
3839 #ifdef TARGET_MEM_FUNCTIONS
3840 emit_library_call (memmove_libfunc, LCT_NORMAL,
3841 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3842 XEXP (from_rtx, 0), Pmode,
3843 convert_to_mode (TYPE_MODE (sizetype),
3844 size, TREE_UNSIGNED (sizetype)),
3845 TYPE_MODE (sizetype));
3846 #else
3847 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3848 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3849 XEXP (to_rtx, 0), Pmode,
3850 convert_to_mode (TYPE_MODE (integer_type_node),
3851 size, TREE_UNSIGNED (integer_type_node)),
3852 TYPE_MODE (integer_type_node));
3853 #endif
3855 preserve_temp_slots (to_rtx);
3856 free_temp_slots ();
3857 pop_temp_slots ();
3858 return want_value ? to_rtx : NULL_RTX;
3861 /* Compute FROM and store the value in the rtx we got. */
3863 push_temp_slots ();
3864 result = store_expr (from, to_rtx, want_value);
3865 preserve_temp_slots (result);
3866 free_temp_slots ();
3867 pop_temp_slots ();
3868 return want_value ? result : NULL_RTX;
3871 /* Generate code for computing expression EXP,
3872 and storing the value into TARGET.
3873 TARGET may contain a QUEUED rtx.
3875 If WANT_VALUE is nonzero, return a copy of the value
3876 not in TARGET, so that we can be sure to use the proper
3877 value in a containing expression even if TARGET has something
3878 else stored in it. If possible, we copy the value through a pseudo
3879 and return that pseudo. Or, if the value is constant, we try to
3880 return the constant. In some cases, we return a pseudo
3881 copied *from* TARGET.
3883 If the mode is BLKmode then we may return TARGET itself.
3884 It turns out that in BLKmode it doesn't cause a problem,
3885 because C has no operators that could combine two different
3886 assignments into the same BLKmode object with different values
3887 with no sequence point. Will other languages need this to
3888 be more thorough?
3890 If WANT_VALUE is 0, we return NULL, to make sure
3891 to catch quickly any cases where the caller uses the value
3892 and fails to set WANT_VALUE. */
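/* Illustrative example (not exhaustive): in a chained assignment such as
   `a = b = c;' the inner assignment is expanded with WANT_VALUE nonzero,
   since its value is used again as the source of the outer assignment,
   whereas a statement-level `a = b;' is expanded with WANT_VALUE zero.  */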
3895 store_expr (exp, target, want_value)
3896 register tree exp;
3897 register rtx target;
3898 int want_value;
3900 register rtx temp;
3901 int dont_return_target = 0;
3903 if (TREE_CODE (exp) == COMPOUND_EXPR)
3905 /* Perform first part of compound expression, then assign from second
3906 part. */
3907 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3908 emit_queue ();
3909 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3911 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3913 /* For conditional expression, get safe form of the target. Then
3914 test the condition, doing the appropriate assignment on either
3915 side. This avoids the creation of unnecessary temporaries.
3916 For non-BLKmode, it is more efficient not to do this. */
3918 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3920 emit_queue ();
3921 target = protect_from_queue (target, 1);
3923 do_pending_stack_adjust ();
3924 NO_DEFER_POP;
3925 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3926 start_cleanup_deferral ();
3927 store_expr (TREE_OPERAND (exp, 1), target, 0);
3928 end_cleanup_deferral ();
3929 emit_queue ();
3930 emit_jump_insn (gen_jump (lab2));
3931 emit_barrier ();
3932 emit_label (lab1);
3933 start_cleanup_deferral ();
3934 store_expr (TREE_OPERAND (exp, 2), target, 0);
3935 end_cleanup_deferral ();
3936 emit_queue ();
3937 emit_label (lab2);
3938 OK_DEFER_POP;
3940 return want_value ? target : NULL_RTX;
3942 else if (queued_subexp_p (target))
3943 /* If target contains a postincrement, let's not risk
3944 using it as the place to generate the rhs. */
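/* For instance (purely illustrative), in `*p++ = f ();' the address of the
   target carries a queued postincrement; generating the call directly into
   that MEM could use the address at the wrong time, so EXP is expanded into
   a fresh pseudo or with no particular target instead.  */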
3946 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3948 /* Expand EXP into a new pseudo. */
3949 temp = gen_reg_rtx (GET_MODE (target));
3950 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3952 else
3953 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3955 /* If target is volatile, ANSI requires accessing the value
3956 *from* the target, if it is accessed. So make that happen.
3957 In no case return the target itself. */
3958 if (! MEM_VOLATILE_P (target) && want_value)
3959 dont_return_target = 1;
3961 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3962 && GET_MODE (target) != BLKmode)
3963 /* If target is in memory and caller wants value in a register instead,
3964 arrange that. Pass TARGET as target for expand_expr so that,
3965 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3966 We know expand_expr will not use the target in that case.
3967 Don't do this if TARGET is volatile because we are supposed
3968 to write it and then read it. */
3970 temp = expand_expr (exp, target, GET_MODE (target), 0);
3971 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3972 temp = copy_to_reg (temp);
3973 dont_return_target = 1;
3975 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3976 /* If this is a scalar in a register that is stored in a wider mode
3977 than the declared mode, compute the result into its declared mode
3978 and then convert to the wider mode. Our value is the computed
3979 expression. */
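/* Hypothetical but typical case: on a target whose PROMOTE_MODE keeps
   `short' variables in full-word registers, TARGET is a promoted HImode
   SUBREG of a word-mode register; the value is computed in HImode here and
   then extended into SUBREG_REG (TARGET) by the convert_move below, using
   the signedness recorded in SUBREG_PROMOTED_UNSIGNED_P.  */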
3981 /* If we don't want a value, we can do the conversion inside EXP,
3982 which will often result in some optimizations. Do the conversion
3983 in two steps: first change the signedness, if needed, then
3984 the extend. But don't do this if the type of EXP is a subtype
3985 of something else since then the conversion might involve
3986 more than just converting modes. */
3987 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3988 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3990 if (TREE_UNSIGNED (TREE_TYPE (exp))
3991 != SUBREG_PROMOTED_UNSIGNED_P (target))
3993 = convert
3994 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3995 TREE_TYPE (exp)),
3996 exp);
3998 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3999 SUBREG_PROMOTED_UNSIGNED_P (target)),
4000 exp);
4003 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4005 /* If TEMP is a volatile MEM and we want a result value, make
4006 the access now so it gets done only once. Likewise if
4007 it contains TARGET. */
4008 if (GET_CODE (temp) == MEM && want_value
4009 && (MEM_VOLATILE_P (temp)
4010 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4011 temp = copy_to_reg (temp);
4013 /* If TEMP is a VOIDmode constant, use convert_modes to make
4014 sure that we properly convert it. */
4015 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4016 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4017 TYPE_MODE (TREE_TYPE (exp)), temp,
4018 SUBREG_PROMOTED_UNSIGNED_P (target));
4020 convert_move (SUBREG_REG (target), temp,
4021 SUBREG_PROMOTED_UNSIGNED_P (target));
4023 /* If we promoted a constant, change the mode back down to match
4024 target. Otherwise, the caller might get confused by a result whose
4025 mode is larger than expected. */
4027 if (want_value && GET_MODE (temp) != GET_MODE (target)
4028 && GET_MODE (temp) != VOIDmode)
4030 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4031 SUBREG_PROMOTED_VAR_P (temp) = 1;
4032 SUBREG_PROMOTED_UNSIGNED_P (temp)
4033 = SUBREG_PROMOTED_UNSIGNED_P (target);
4036 return want_value ? temp : NULL_RTX;
4038 else
4040 temp = expand_expr (exp, target, GET_MODE (target), 0);
4041 /* Return TARGET if it's a specified hardware register.
4042 If TARGET is a volatile mem ref, either return TARGET
4043 or return a reg copied *from* TARGET; ANSI requires this.
4045 Otherwise, if TEMP is not TARGET, return TEMP
4046 if it is constant (for efficiency),
4047 or if we really want the correct value. */
4048 if (!(target && GET_CODE (target) == REG
4049 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4050 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4051 && ! rtx_equal_p (temp, target)
4052 && (CONSTANT_P (temp) || want_value))
4053 dont_return_target = 1;
4056 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4057 the same as that of TARGET, adjust the constant. This is needed, for
4058 example, in case it is a CONST_DOUBLE and we want only a word-sized
4059 value. */
4060 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4061 && TREE_CODE (exp) != ERROR_MARK
4062 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4063 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4064 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4066 if (current_function_check_memory_usage
4067 && GET_CODE (target) == MEM
4068 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4070 in_check_memory_usage = 1;
4071 if (GET_CODE (temp) == MEM)
4072 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4073 VOIDmode, 3, XEXP (target, 0), Pmode,
4074 XEXP (temp, 0), Pmode,
4075 expr_size (exp), TYPE_MODE (sizetype));
4076 else
4077 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4078 VOIDmode, 3, XEXP (target, 0), Pmode,
4079 expr_size (exp), TYPE_MODE (sizetype),
4080 GEN_INT (MEMORY_USE_WO),
4081 TYPE_MODE (integer_type_node));
4082 in_check_memory_usage = 0;
4085 /* If value was not generated in the target, store it there.
4086 Convert the value to TARGET's type first if necessary. */
4087 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4088 one or both of them are volatile memory refs, we have to distinguish
4089 two cases:
4090 - expand_expr has used TARGET. In this case, we must not generate
4091 another copy. This can be detected by TARGET being equal according
4092 to == .
4093 - expand_expr has not used TARGET - that means that the source just
4094 happens to have the same RTX form. Since temp will have been created
4095 by expand_expr, it will compare unequal according to == .
4096 We must generate a copy in this case, to reach the correct number
4097 of volatile memory references. */
4099 if ((! rtx_equal_p (temp, target)
4100 || (temp != target && (side_effects_p (temp)
4101 || side_effects_p (target))))
4102 && TREE_CODE (exp) != ERROR_MARK)
4104 target = protect_from_queue (target, 1);
4105 if (GET_MODE (temp) != GET_MODE (target)
4106 && GET_MODE (temp) != VOIDmode)
4108 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4109 if (dont_return_target)
4111 /* In this case, we will return TEMP,
4112 so make sure it has the proper mode.
4113 But don't forget to store the value into TARGET. */
4114 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4115 emit_move_insn (target, temp);
4117 else
4118 convert_move (target, temp, unsignedp);
4121 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4123 /* Handle copying a string constant into an array.
4124 The string constant may be shorter than the array.
4125 So copy just the string's actual length, and clear the rest. */
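/* Illustrative example: for `char buf[8] = "ab";' the string constant
   occupies 3 bytes (including the terminating nul), so 3 bytes are
   block-copied and the remaining 5 bytes are cleared.  */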
4126 rtx size;
4127 rtx addr;
4129 /* Get the size of the data type of the string,
4130 which is actually the size of the target. */
4131 size = expr_size (exp);
4132 if (GET_CODE (size) == CONST_INT
4133 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4134 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4135 else
4137 /* Compute the size of the data to copy from the string. */
4138 tree copy_size
4139 = size_binop (MIN_EXPR,
4140 make_tree (sizetype, size),
4141 size_int (TREE_STRING_LENGTH (exp)));
4142 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4143 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4144 VOIDmode, 0);
4145 rtx label = 0;
4147 /* Copy that much. */
4148 emit_block_move (target, temp, copy_size_rtx,
4149 TYPE_ALIGN (TREE_TYPE (exp)));
4151 /* Figure out how much is left in TARGET that we have to clear.
4152 Do all calculations in ptr_mode. */
4154 addr = XEXP (target, 0);
4155 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4157 if (GET_CODE (copy_size_rtx) == CONST_INT)
4159 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4160 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4161 align = MIN (align,
4162 (unsigned int) (BITS_PER_UNIT
4163 * (INTVAL (copy_size_rtx)
4164 & - INTVAL (copy_size_rtx))));
4166 else
4168 addr = force_reg (ptr_mode, addr);
4169 addr = expand_binop (ptr_mode, add_optab, addr,
4170 copy_size_rtx, NULL_RTX, 0,
4171 OPTAB_LIB_WIDEN);
4173 size = expand_binop (ptr_mode, sub_optab, size,
4174 copy_size_rtx, NULL_RTX, 0,
4175 OPTAB_LIB_WIDEN);
4177 align = BITS_PER_UNIT;
4178 label = gen_label_rtx ();
4179 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4180 GET_MODE (size), 0, 0, label);
4182 align = MIN (align, expr_align (copy_size));
4184 if (size != const0_rtx)
4186 rtx dest = gen_rtx_MEM (BLKmode, addr);
4188 MEM_COPY_ATTRIBUTES (dest, target);
4190 /* Be sure we can write on ADDR. */
4191 in_check_memory_usage = 1;
4192 if (current_function_check_memory_usage)
4193 emit_library_call (chkr_check_addr_libfunc,
4194 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4195 addr, Pmode,
4196 size, TYPE_MODE (sizetype),
4197 GEN_INT (MEMORY_USE_WO),
4198 TYPE_MODE (integer_type_node));
4199 in_check_memory_usage = 0;
4200 clear_storage (dest, size, align);
4203 if (label)
4204 emit_label (label);
4207 /* Handle calls that return values in multiple non-contiguous locations.
4208 The Irix 6 ABI has examples of this. */
4209 else if (GET_CODE (target) == PARALLEL)
4210 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4211 TYPE_ALIGN (TREE_TYPE (exp)));
4212 else if (GET_MODE (temp) == BLKmode)
4213 emit_block_move (target, temp, expr_size (exp),
4214 TYPE_ALIGN (TREE_TYPE (exp)));
4215 else
4216 emit_move_insn (target, temp);
4219 /* If we don't want a value, return NULL_RTX. */
4220 if (! want_value)
4221 return NULL_RTX;
4223 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4224 ??? The latter test doesn't seem to make sense. */
4225 else if (dont_return_target && GET_CODE (temp) != MEM)
4226 return temp;
4228 /* Return TARGET itself if it is a hard register. */
4229 else if (want_value && GET_MODE (target) != BLKmode
4230 && ! (GET_CODE (target) == REG
4231 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4232 return copy_to_reg (target);
4234 else
4235 return target;
4238 /* Return 1 if EXP just contains zeros. */
4240 static int
4241 is_zeros_p (exp)
4242 tree exp;
4244 tree elt;
4246 switch (TREE_CODE (exp))
4248 case CONVERT_EXPR:
4249 case NOP_EXPR:
4250 case NON_LVALUE_EXPR:
4251 return is_zeros_p (TREE_OPERAND (exp, 0));
4253 case INTEGER_CST:
4254 return integer_zerop (exp);
4256 case COMPLEX_CST:
4257 return
4258 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4260 case REAL_CST:
4261 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4263 case CONSTRUCTOR:
4264 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4265 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4266 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4267 if (! is_zeros_p (TREE_VALUE (elt)))
4268 return 0;
4270 return 1;
4272 default:
4273 return 0;
4277 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4279 static int
4280 mostly_zeros_p (exp)
4281 tree exp;
4283 if (TREE_CODE (exp) == CONSTRUCTOR)
4285 int elts = 0, zeros = 0;
4286 tree elt = CONSTRUCTOR_ELTS (exp);
4287 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4289 /* If there are no ranges of true bits, it is all zero. */
4290 return elt == NULL_TREE;
4292 for (; elt; elt = TREE_CHAIN (elt))
4294 /* We do not handle the case where the index is a RANGE_EXPR,
4295 so the statistic will be somewhat inaccurate.
4296 We do make a more accurate count in store_constructor itself,
4297 and since this function is only used for nested array elements,
4298 this should be close enough. */
4299 if (mostly_zeros_p (TREE_VALUE (elt)))
4300 zeros++;
4301 elts++;
4304 return 4 * zeros >= 3 * elts;
4307 return is_zeros_p (exp);
4310 /* Helper function for store_constructor.
4311 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4312 TYPE is the type of the CONSTRUCTOR, not the element type.
4313 ALIGN and CLEARED are as for store_constructor.
4314 ALIAS_SET is the alias set to use for any stores.
4316 This provides a recursive shortcut back to store_constructor when it isn't
4317 necessary to go through store_field. This is so that we can pass through
4318 the cleared field to let store_constructor know that we may not have to
4319 clear a substructure if the outer structure has already been cleared. */
4321 static void
4322 store_constructor_field (target, bitsize, bitpos,
4323 mode, exp, type, align, cleared, alias_set)
4324 rtx target;
4325 unsigned HOST_WIDE_INT bitsize;
4326 HOST_WIDE_INT bitpos;
4327 enum machine_mode mode;
4328 tree exp, type;
4329 unsigned int align;
4330 int cleared;
4331 int alias_set;
4333 if (TREE_CODE (exp) == CONSTRUCTOR
4334 && bitpos % BITS_PER_UNIT == 0
4335 /* If we have a non-zero bitpos for a register target, then we just
4336 let store_field do the bitfield handling. This is unlikely to
4337 generate unnecessary clear instructions anyway. */
4338 && (bitpos == 0 || GET_CODE (target) == MEM))
4340 if (bitpos != 0)
4341 target
4342 = change_address (target,
4343 GET_MODE (target) == BLKmode
4344 || 0 != (bitpos
4345 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4346 ? BLKmode : VOIDmode,
4347 plus_constant (XEXP (target, 0),
4348 bitpos / BITS_PER_UNIT));
4351 /* Show the alignment may no longer be what it was and update the alias
4352 set, if required. */
4353 if (bitpos != 0)
4354 align = MIN (align, (unsigned int) bitpos & - bitpos);
4355 if (GET_CODE (target) == MEM)
4356 MEM_ALIAS_SET (target) = alias_set;
4358 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4360 else
4361 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4362 int_size_in_bytes (type), alias_set);
4365 /* Store the value of constructor EXP into the rtx TARGET.
4366 TARGET is either a REG or a MEM.
4367 ALIGN is the maximum known alignment for TARGET.
4368 CLEARED is true if TARGET is known to have been zeroed.
4369 SIZE is the number of bytes of TARGET we are allowed to modify: this
4370 may not be the same as the size of EXP if we are assigning to a field
4371 which has been packed to exclude padding bits. */
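/* Illustrative example: for `struct { int a, b, c; } x = { 1, 2 };' the
   CONSTRUCTOR lists fewer elements than the structure has fields, so for a
   MEM target with SIZE > 0 the whole object is cleared first and the fields
   that are present are then stored one at a time.  */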
4373 static void
4374 store_constructor (exp, target, align, cleared, size)
4375 tree exp;
4376 rtx target;
4377 unsigned int align;
4378 int cleared;
4379 HOST_WIDE_INT size;
4381 tree type = TREE_TYPE (exp);
4382 #ifdef WORD_REGISTER_OPERATIONS
4383 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4384 #endif
4386 /* We know our target cannot conflict, since safe_from_p has been called. */
4387 #if 0
4388 /* Don't try copying piece by piece into a hard register
4389 since that is vulnerable to being clobbered by EXP.
4390 Instead, construct in a pseudo register and then copy it all. */
4391 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4393 rtx temp = gen_reg_rtx (GET_MODE (target));
4394 store_constructor (exp, temp, align, cleared, size);
4395 emit_move_insn (target, temp);
4396 return;
4398 #endif
4400 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4401 || TREE_CODE (type) == QUAL_UNION_TYPE)
4403 register tree elt;
4405 /* Inform later passes that the whole union value is dead. */
4406 if ((TREE_CODE (type) == UNION_TYPE
4407 || TREE_CODE (type) == QUAL_UNION_TYPE)
4408 && ! cleared)
4410 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4412 /* If the constructor is empty, clear the union. */
4413 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4414 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4417 /* If we are building a static constructor into a register,
4418 set the initial value as zero so we can fold the value into
4419 a constant. But if more than one register is involved,
4420 this probably loses. */
4421 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4422 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4424 if (! cleared)
4425 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4427 cleared = 1;
4430 /* If the constructor has fewer fields than the structure
4431 or if we are initializing the structure to mostly zeros,
4432 clear the whole structure first. Don't do this if TARGET is a
4433 register whose mode size isn't equal to SIZE since clear_storage
4434 can't handle this case. */
4435 else if (size > 0
4436 && ((list_length (CONSTRUCTOR_ELTS (exp))
4437 != fields_length (type))
4438 || mostly_zeros_p (exp))
4439 && (GET_CODE (target) != REG
4440 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4442 if (! cleared)
4443 clear_storage (target, GEN_INT (size), align);
4445 cleared = 1;
4447 else if (! cleared)
4448 /* Inform later passes that the old value is dead. */
4449 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4451 /* Store each element of the constructor into
4452 the corresponding field of TARGET. */
4454 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4456 register tree field = TREE_PURPOSE (elt);
4457 #ifdef WORD_REGISTER_OPERATIONS
4458 tree value = TREE_VALUE (elt);
4459 #endif
4460 register enum machine_mode mode;
4461 HOST_WIDE_INT bitsize;
4462 HOST_WIDE_INT bitpos = 0;
4463 int unsignedp;
4464 tree offset;
4465 rtx to_rtx = target;
4467 /* Just ignore missing fields.
4468 We cleared the whole structure, above,
4469 if any fields are missing. */
4470 if (field == 0)
4471 continue;
4473 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4474 continue;
4476 if (host_integerp (DECL_SIZE (field), 1))
4477 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4478 else
4479 bitsize = -1;
4481 unsignedp = TREE_UNSIGNED (field);
4482 mode = DECL_MODE (field);
4483 if (DECL_BIT_FIELD (field))
4484 mode = VOIDmode;
4486 offset = DECL_FIELD_OFFSET (field);
4487 if (host_integerp (offset, 0)
4488 && host_integerp (bit_position (field), 0))
4490 bitpos = int_bit_position (field);
4491 offset = 0;
4493 else
4494 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4496 if (offset)
4498 rtx offset_rtx;
4500 if (contains_placeholder_p (offset))
4501 offset = build (WITH_RECORD_EXPR, sizetype,
4502 offset, make_tree (TREE_TYPE (exp), target));
4504 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4505 if (GET_CODE (to_rtx) != MEM)
4506 abort ();
4508 if (GET_MODE (offset_rtx) != ptr_mode)
4510 #ifdef POINTERS_EXTEND_UNSIGNED
4511 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4512 #else
4513 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4514 #endif
4517 to_rtx
4518 = change_address (to_rtx, VOIDmode,
4519 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4520 force_reg (ptr_mode,
4521 offset_rtx)));
4522 align = DECL_OFFSET_ALIGN (field);
4525 if (TREE_READONLY (field))
4527 if (GET_CODE (to_rtx) == MEM)
4528 to_rtx = copy_rtx (to_rtx);
4530 RTX_UNCHANGING_P (to_rtx) = 1;
4533 #ifdef WORD_REGISTER_OPERATIONS
4534 /* If this initializes a field that is smaller than a word, at the
4535 start of a word, try to widen it to a full word.
4536 This special case allows us to output C++ member function
4537 initializations in a form that the optimizers can understand. */
4538 if (GET_CODE (target) == REG
4539 && bitsize < BITS_PER_WORD
4540 && bitpos % BITS_PER_WORD == 0
4541 && GET_MODE_CLASS (mode) == MODE_INT
4542 && TREE_CODE (value) == INTEGER_CST
4543 && exp_size >= 0
4544 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4546 tree type = TREE_TYPE (value);
4547 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4549 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4550 value = convert (type, value);
4552 if (BYTES_BIG_ENDIAN)
4553 value
4554 = fold (build (LSHIFT_EXPR, type, value,
4555 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4556 bitsize = BITS_PER_WORD;
4557 mode = word_mode;
4559 #endif
4560 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4561 TREE_VALUE (elt), type, align, cleared,
4562 (DECL_NONADDRESSABLE_P (field)
4563 && GET_CODE (to_rtx) == MEM)
4564 ? MEM_ALIAS_SET (to_rtx)
4565 : get_alias_set (TREE_TYPE (field)));
4568 else if (TREE_CODE (type) == ARRAY_TYPE)
4570 register tree elt;
4571 register int i;
4572 int need_to_clear;
4573 tree domain = TYPE_DOMAIN (type);
4574 tree elttype = TREE_TYPE (type);
4575 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4576 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4577 HOST_WIDE_INT minelt;
4578 HOST_WIDE_INT maxelt;
4580 /* If we have constant bounds for the range of the type, get them. */
4581 if (const_bounds_p)
4583 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4584 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4587 /* If the constructor has fewer elements than the array,
4588 clear the whole array first. Similarly if this is
4589 a static constructor of a non-BLKmode object. */
4590 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4591 need_to_clear = 1;
4592 else
4594 HOST_WIDE_INT count = 0, zero_count = 0;
4595 need_to_clear = ! const_bounds_p;
4597 /* This loop is a more accurate version of the loop in
4598 mostly_zeros_p (it handles RANGE_EXPR in an index).
4599 It is also needed to check for missing elements. */
4600 for (elt = CONSTRUCTOR_ELTS (exp);
4601 elt != NULL_TREE && ! need_to_clear;
4602 elt = TREE_CHAIN (elt))
4604 tree index = TREE_PURPOSE (elt);
4605 HOST_WIDE_INT this_node_count;
4607 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4609 tree lo_index = TREE_OPERAND (index, 0);
4610 tree hi_index = TREE_OPERAND (index, 1);
4612 if (! host_integerp (lo_index, 1)
4613 || ! host_integerp (hi_index, 1))
4615 need_to_clear = 1;
4616 break;
4619 this_node_count = (tree_low_cst (hi_index, 1)
4620 - tree_low_cst (lo_index, 1) + 1);
4622 else
4623 this_node_count = 1;
4625 count += this_node_count;
4626 if (mostly_zeros_p (TREE_VALUE (elt)))
4627 zero_count += this_node_count;
4630 /* Clear the entire array first if there are any missing elements,
4631 or if the incidence of zero elements is >= 75%. */
4632 if (! need_to_clear
4633 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4634 need_to_clear = 1;
4637 if (need_to_clear && size > 0)
4639 if (! cleared)
4640 clear_storage (target, GEN_INT (size), align);
4641 cleared = 1;
4643 else
4644 /* Inform later passes that the old value is dead. */
4645 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4647 /* Store each element of the constructor into
4648 the corresponding element of TARGET, determined
4649 by counting the elements. */
4650 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4651 elt;
4652 elt = TREE_CHAIN (elt), i++)
4654 register enum machine_mode mode;
4655 HOST_WIDE_INT bitsize;
4656 HOST_WIDE_INT bitpos;
4657 int unsignedp;
4658 tree value = TREE_VALUE (elt);
4659 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4660 tree index = TREE_PURPOSE (elt);
4661 rtx xtarget = target;
4663 if (cleared && is_zeros_p (value))
4664 continue;
4666 unsignedp = TREE_UNSIGNED (elttype);
4667 mode = TYPE_MODE (elttype);
4668 if (mode == BLKmode)
4669 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4670 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4671 : -1);
4672 else
4673 bitsize = GET_MODE_BITSIZE (mode);
4675 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4677 tree lo_index = TREE_OPERAND (index, 0);
4678 tree hi_index = TREE_OPERAND (index, 1);
4679 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4680 struct nesting *loop;
4681 HOST_WIDE_INT lo, hi, count;
4682 tree position;
4684 /* If the range is constant and "small", unroll the loop. */
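/* A RANGE_EXPR index comes from initializers such as the GNU C extension
   `int v[10] = { [2 ... 5] = 7 };'.  For a small constant range like that
   the stores are unrolled here; otherwise a run-time loop is generated
   below.  */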
4685 if (const_bounds_p
4686 && host_integerp (lo_index, 0)
4687 && host_integerp (hi_index, 0)
4688 && (lo = tree_low_cst (lo_index, 0),
4689 hi = tree_low_cst (hi_index, 0),
4690 count = hi - lo + 1,
4691 (GET_CODE (target) != MEM
4692 || count <= 2
4693 || (host_integerp (TYPE_SIZE (elttype), 1)
4694 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4695 <= 40 * 8)))))
4697 lo -= minelt; hi -= minelt;
4698 for (; lo <= hi; lo++)
4700 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4701 store_constructor_field
4702 (target, bitsize, bitpos, mode, value, type, align,
4703 cleared,
4704 TYPE_NONALIASED_COMPONENT (type)
4705 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4708 else
4710 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4711 loop_top = gen_label_rtx ();
4712 loop_end = gen_label_rtx ();
4714 unsignedp = TREE_UNSIGNED (domain);
4716 index = build_decl (VAR_DECL, NULL_TREE, domain);
4718 index_r
4719 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4720 &unsignedp, 0));
4721 SET_DECL_RTL (index, index_r);
4722 if (TREE_CODE (value) == SAVE_EXPR
4723 && SAVE_EXPR_RTL (value) == 0)
4725 /* Make sure value gets expanded once before the
4726 loop. */
4727 expand_expr (value, const0_rtx, VOIDmode, 0);
4728 emit_queue ();
4730 store_expr (lo_index, index_r, 0);
4731 loop = expand_start_loop (0);
4733 /* Assign value to element index. */
4734 position
4735 = convert (ssizetype,
4736 fold (build (MINUS_EXPR, TREE_TYPE (index),
4737 index, TYPE_MIN_VALUE (domain))));
4738 position = size_binop (MULT_EXPR, position,
4739 convert (ssizetype,
4740 TYPE_SIZE_UNIT (elttype)));
4742 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4743 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4744 xtarget = change_address (target, mode, addr);
4745 if (TREE_CODE (value) == CONSTRUCTOR)
4746 store_constructor (value, xtarget, align, cleared,
4747 bitsize / BITS_PER_UNIT);
4748 else
4749 store_expr (value, xtarget, 0);
4751 expand_exit_loop_if_false (loop,
4752 build (LT_EXPR, integer_type_node,
4753 index, hi_index));
4755 expand_increment (build (PREINCREMENT_EXPR,
4756 TREE_TYPE (index),
4757 index, integer_one_node), 0, 0);
4758 expand_end_loop ();
4759 emit_label (loop_end);
4762 else if ((index != 0 && ! host_integerp (index, 0))
4763 || ! host_integerp (TYPE_SIZE (elttype), 1))
4765 rtx pos_rtx, addr;
4766 tree position;
4768 if (index == 0)
4769 index = ssize_int (1);
4771 if (minelt)
4772 index = convert (ssizetype,
4773 fold (build (MINUS_EXPR, index,
4774 TYPE_MIN_VALUE (domain))));
4776 position = size_binop (MULT_EXPR, index,
4777 convert (ssizetype,
4778 TYPE_SIZE_UNIT (elttype)));
4779 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4780 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4781 xtarget = change_address (target, mode, addr);
4782 store_expr (value, xtarget, 0);
4784 else
4786 if (index != 0)
4787 bitpos = ((tree_low_cst (index, 0) - minelt)
4788 * tree_low_cst (TYPE_SIZE (elttype), 1));
4789 else
4790 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4792 store_constructor_field (target, bitsize, bitpos, mode, value,
4793 type, align, cleared,
4794 TYPE_NONALIASED_COMPONENT (type)
4795 && GET_CODE (target) == MEM
4796 ? MEM_ALIAS_SET (target) :
4797 get_alias_set (elttype));
4803 /* Set constructor assignments. */
4804 else if (TREE_CODE (type) == SET_TYPE)
4806 tree elt = CONSTRUCTOR_ELTS (exp);
4807 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4808 tree domain = TYPE_DOMAIN (type);
4809 tree domain_min, domain_max, bitlength;
4811 /* The default implementation strategy is to extract the constant
4812 parts of the constructor, use that to initialize the target,
4813 and then "or" in whatever non-constant ranges we need in addition.
4815 If a large set is all zero or all ones, it is
4816 probably better to set it using memset (if available) or bzero.
4817 Also, if a large set has just a single range, it may also be
4818 better to first clear the whole set (using
4819 bzero/memset) and then set the bits we want. */
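/* SET_TYPE constructors come from front ends with first-class set types
   (CHILL, for example).  Roughly, a set initializer made of the ranges
   [1..3] and [8..8] is handled by building the constant bits directly when
   the set is small, and otherwise by clearing the object and then calling
   __setbits (or memset, when possible) for each range.  */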
4821 /* Check for all zeros. */
4822 if (elt == NULL_TREE && size > 0)
4824 if (!cleared)
4825 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4826 return;
4829 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4830 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4831 bitlength = size_binop (PLUS_EXPR,
4832 size_diffop (domain_max, domain_min),
4833 ssize_int (1));
4835 nbits = tree_low_cst (bitlength, 1);
4837 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4838 are "complicated" (more than one range), initialize (the
4839 constant parts) by copying from a constant. */
4840 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4841 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4843 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4844 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4845 char *bit_buffer = (char *) alloca (nbits);
4846 HOST_WIDE_INT word = 0;
4847 unsigned int bit_pos = 0;
4848 unsigned int ibit = 0;
4849 unsigned int offset = 0; /* In bytes from beginning of set. */
4851 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4852 for (;;)
4854 if (bit_buffer[ibit])
4856 if (BYTES_BIG_ENDIAN)
4857 word |= (1 << (set_word_size - 1 - bit_pos));
4858 else
4859 word |= 1 << bit_pos;
4862 bit_pos++; ibit++;
4863 if (bit_pos >= set_word_size || ibit == nbits)
4865 if (word != 0 || ! cleared)
4867 rtx datum = GEN_INT (word);
4868 rtx to_rtx;
4870 /* The assumption here is that it is safe to use
4871 XEXP if the set is multi-word, but not if
4872 it's single-word. */
4873 if (GET_CODE (target) == MEM)
4875 to_rtx = plus_constant (XEXP (target, 0), offset);
4876 to_rtx = change_address (target, mode, to_rtx);
4878 else if (offset == 0)
4879 to_rtx = target;
4880 else
4881 abort ();
4882 emit_move_insn (to_rtx, datum);
4885 if (ibit == nbits)
4886 break;
4887 word = 0;
4888 bit_pos = 0;
4889 offset += set_word_size / BITS_PER_UNIT;
4893 else if (!cleared)
4894 /* Don't bother clearing storage if the set is all ones. */
4895 if (TREE_CHAIN (elt) != NULL_TREE
4896 || (TREE_PURPOSE (elt) == NULL_TREE
4897 ? nbits != 1
4898 : ( ! host_integerp (TREE_VALUE (elt), 0)
4899 || ! host_integerp (TREE_PURPOSE (elt), 0)
4900 || (tree_low_cst (TREE_VALUE (elt), 0)
4901 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4902 != (HOST_WIDE_INT) nbits))))
4903 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4905 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4907 /* Start of range of element or NULL. */
4908 tree startbit = TREE_PURPOSE (elt);
4909 /* End of range of element, or element value. */
4910 tree endbit = TREE_VALUE (elt);
4911 #ifdef TARGET_MEM_FUNCTIONS
4912 HOST_WIDE_INT startb, endb;
4913 #endif
4914 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4916 bitlength_rtx = expand_expr (bitlength,
4917 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4919 /* Handle non-range tuple element like [ expr ]. */
4920 if (startbit == NULL_TREE)
4922 startbit = save_expr (endbit);
4923 endbit = startbit;
4926 startbit = convert (sizetype, startbit);
4927 endbit = convert (sizetype, endbit);
4928 if (! integer_zerop (domain_min))
4930 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4931 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4933 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4934 EXPAND_CONST_ADDRESS);
4935 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4936 EXPAND_CONST_ADDRESS);
4938 if (REG_P (target))
4940 targetx
4941 = assign_temp
4942 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4943 TYPE_QUAL_CONST)),
4944 0, 1, 1);
4945 emit_move_insn (targetx, target);
4948 else if (GET_CODE (target) == MEM)
4949 targetx = target;
4950 else
4951 abort ();
4953 #ifdef TARGET_MEM_FUNCTIONS
4954 /* Optimization: If startbit and endbit are
4955 constants divisible by BITS_PER_UNIT,
4956 call memset instead. */
4957 if (TREE_CODE (startbit) == INTEGER_CST
4958 && TREE_CODE (endbit) == INTEGER_CST
4959 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4960 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4962 emit_library_call (memset_libfunc, LCT_NORMAL,
4963 VOIDmode, 3,
4964 plus_constant (XEXP (targetx, 0),
4965 startb / BITS_PER_UNIT),
4966 Pmode,
4967 constm1_rtx, TYPE_MODE (integer_type_node),
4968 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4969 TYPE_MODE (sizetype));
4971 else
4972 #endif
4973 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4974 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4975 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4976 startbit_rtx, TYPE_MODE (sizetype),
4977 endbit_rtx, TYPE_MODE (sizetype));
4979 if (REG_P (target))
4980 emit_move_insn (target, targetx);
4984 else
4985 abort ();
4988 /* Store the value of EXP (an expression tree)
4989 into a subfield of TARGET which has mode MODE and occupies
4990 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4991 If MODE is VOIDmode, it means that we are storing into a bit-field.
4993 If VALUE_MODE is VOIDmode, return nothing in particular.
4994 UNSIGNEDP is not used in this case.
4996 Otherwise, return an rtx for the value stored. This rtx
4997 has mode VALUE_MODE if that is convenient to do.
4998 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5000 ALIGN is the alignment that TARGET is known to have.
5001 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5003 ALIAS_SET is the alias set for the destination. This value will
5004 (in general) be different from that for TARGET, since TARGET is a
5005 reference to the containing structure. */
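/* Illustrative example: expanding `s.f = x;' where F is a 5-bit bit-field
   that starts 3 bits into S calls store_field with BITSIZE 5, BITPOS 3 and
   MODE VOIDmode, and the store goes through store_bit_field; an ordinary
   aligned field instead uses the field's own mode and a plain move or block
   copy.  */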
5007 static rtx
5008 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5009 unsignedp, align, total_size, alias_set)
5010 rtx target;
5011 HOST_WIDE_INT bitsize;
5012 HOST_WIDE_INT bitpos;
5013 enum machine_mode mode;
5014 tree exp;
5015 enum machine_mode value_mode;
5016 int unsignedp;
5017 unsigned int align;
5018 HOST_WIDE_INT total_size;
5019 int alias_set;
5021 HOST_WIDE_INT width_mask = 0;
5023 if (TREE_CODE (exp) == ERROR_MARK)
5024 return const0_rtx;
5026 /* If we have nothing to store, do nothing unless the expression has
5027 side-effects. */
5028 if (bitsize == 0)
5029 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5031 if (bitsize < HOST_BITS_PER_WIDE_INT)
5032 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5034 /* If we are storing into an unaligned field of an aligned union that is
5035 in a register, we may have the mode of TARGET being an integer mode but
5036 MODE == BLKmode. In that case, get an aligned object whose size and
5037 alignment are the same as TARGET and store TARGET into it (we can avoid
5038 the store if the field being stored is the entire width of TARGET). Then
5039 call ourselves recursively to store the field into a BLKmode version of
5040 that object. Finally, load from the object into TARGET. This is not
5041 very efficient in general, but should only be slightly more expensive
5042 than the otherwise-required unaligned accesses. Perhaps this can be
5043 cleaned up later. */
5045 if (mode == BLKmode
5046 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5048 rtx object
5049 = assign_temp
5050 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5051 TYPE_QUAL_CONST),
5052 0, 1, 1);
5053 rtx blk_object = copy_rtx (object);
5055 PUT_MODE (blk_object, BLKmode);
5057 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5058 emit_move_insn (object, target);
5060 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5061 align, total_size, alias_set);
5063 /* Even though we aren't returning target, we need to
5064 give it the updated value. */
5065 emit_move_insn (target, object);
5067 return blk_object;
5070 if (GET_CODE (target) == CONCAT)
5072 /* We're storing into a struct containing a single __complex. */
5074 if (bitpos != 0)
5075 abort ();
5076 return store_expr (exp, target, 0);
5079 /* If the structure is in a register or if the component
5080 is a bit field, we cannot use addressing to access it.
5081 Use bit-field techniques or SUBREG to store in it. */
5083 if (mode == VOIDmode
5084 || (mode != BLKmode && ! direct_store[(int) mode]
5085 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5086 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5087 || GET_CODE (target) == REG
5088 || GET_CODE (target) == SUBREG
5089 /* If the field isn't aligned enough to store as an ordinary memref,
5090 store it as a bit field. */
5091 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5092 && (align < GET_MODE_ALIGNMENT (mode)
5093 || bitpos % GET_MODE_ALIGNMENT (mode)))
5094 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5095 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5096 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5097 /* If the RHS and field are a constant size and the size of the
5098 RHS isn't the same size as the bitfield, we must use bitfield
5099 operations. */
5100 || (bitsize >= 0
5101 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5102 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5104 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5106 /* If BITSIZE is narrower than the size of the type of EXP
5107 we will be narrowing TEMP. Normally, what's wanted are the
5108 low-order bits. However, if EXP's type is a record and this is a
5109 big-endian machine, we want the upper BITSIZE bits. */
5110 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5111 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5112 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5113 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5114 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5115 - bitsize),
5116 temp, 1);
5118 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5119 MODE. */
5120 if (mode != VOIDmode && mode != BLKmode
5121 && mode != TYPE_MODE (TREE_TYPE (exp)))
5122 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5124 /* If the modes of TARGET and TEMP are both BLKmode, both
5125 must be in memory and BITPOS must be aligned on a byte
5126 boundary. If so, we simply do a block copy. */
5127 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5129 unsigned int exp_align = expr_align (exp);
5131 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5132 || bitpos % BITS_PER_UNIT != 0)
5133 abort ();
5135 target = change_address (target, VOIDmode,
5136 plus_constant (XEXP (target, 0),
5137 bitpos / BITS_PER_UNIT));
5139 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5140 align = MIN (exp_align, align);
5142 /* Find an alignment that is consistent with the bit position. */
5143 while ((bitpos % align) != 0)
5144 align >>= 1;
5146 emit_block_move (target, temp,
5147 bitsize == -1 ? expr_size (exp)
5148 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5149 / BITS_PER_UNIT),
5150 align);
5152 return value_mode == VOIDmode ? const0_rtx : target;
5155 /* Store the value in the bitfield. */
5156 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5157 if (value_mode != VOIDmode)
5159 /* The caller wants an rtx for the value. */
5160 /* If possible, avoid refetching from the bitfield itself. */
5161 if (width_mask != 0
5162 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5164 tree count;
5165 enum machine_mode tmode;
5167 if (unsignedp)
5168 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5169 tmode = GET_MODE (temp);
5170 if (tmode == VOIDmode)
5171 tmode = value_mode;
5172 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5173 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5174 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5176 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5177 NULL_RTX, value_mode, 0, align,
5178 total_size);
5180 return const0_rtx;
5182 else
5184 rtx addr = XEXP (target, 0);
5185 rtx to_rtx;
5187 /* If a value is wanted, it must be the lhs;
5188 so make the address stable for multiple use. */
5190 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5191 && ! CONSTANT_ADDRESS_P (addr)
5192 /* A frame-pointer reference is already stable. */
5193 && ! (GET_CODE (addr) == PLUS
5194 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5195 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5196 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5197 addr = copy_to_reg (addr);
5199 /* Now build a reference to just the desired component. */
5201 to_rtx = copy_rtx (change_address (target, mode,
5202 plus_constant (addr,
5203 (bitpos
5204 / BITS_PER_UNIT))));
5205 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5206 /* If the address of the structure varies, then it might be on
5207 the stack. And, stack slots may be shared across scopes.
5208 So, two different structures, of different types, can end up
5209 at the same location. We will give the structures alias set
5210 zero; here we must be careful not to give non-zero alias sets
5211 to their fields. */
5212 if (!rtx_varies_p (addr, /*for_alias=*/0))
5213 MEM_ALIAS_SET (to_rtx) = alias_set;
5214 else
5215 MEM_ALIAS_SET (to_rtx) = 0;
5217 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5221 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5222 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5223 ARRAY_REFs and find the ultimate containing object, which we return.
5225 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5226 bit position, and *PUNSIGNEDP to the signedness of the field.
5227 If the position of the field is variable, we store a tree
5228 giving the variable offset (in units) in *POFFSET.
5229 This offset is in addition to the bit position.
5230 If the position is not variable, we store 0 in *POFFSET.
5231 We set *PALIGNMENT to the alignment of the address that will be
5232 computed. This is the alignment of the thing we return if *POFFSET
5233 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5235 If any of the extraction expressions is volatile,
5236 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5238 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5239 is a mode that can be used to access the field. In that case, *PBITSIZE
5240 is redundant.
5242 If the field describes a variable-sized object, *PMODE is set to
5243 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5244 this case, but the address of the object can be found. */
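/* Illustrative example: for a reference such as `a.b[i].c', the loop below
   peels off the COMPONENT_REFs and the ARRAY_REF one at a time; the constant
   part of the displacement accumulates in *PBITPOS, the variable part (the
   term involving I) ends up in *POFFSET, and the containing object `a' is
   what gets returned.  */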
5246 tree
5247 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5248 punsignedp, pvolatilep, palignment)
5249 tree exp;
5250 HOST_WIDE_INT *pbitsize;
5251 HOST_WIDE_INT *pbitpos;
5252 tree *poffset;
5253 enum machine_mode *pmode;
5254 int *punsignedp;
5255 int *pvolatilep;
5256 unsigned int *palignment;
5258 tree size_tree = 0;
5259 enum machine_mode mode = VOIDmode;
5260 tree offset = size_zero_node;
5261 tree bit_offset = bitsize_zero_node;
5262 unsigned int alignment = BIGGEST_ALIGNMENT;
5263 tree tem;
5265 /* First get the mode, signedness, and size. We do this from just the
5266 outermost expression. */
5267 if (TREE_CODE (exp) == COMPONENT_REF)
5269 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5270 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5271 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5273 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5275 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5277 size_tree = TREE_OPERAND (exp, 1);
5278 *punsignedp = TREE_UNSIGNED (exp);
5280 else
5282 mode = TYPE_MODE (TREE_TYPE (exp));
5283 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5285 if (mode == BLKmode)
5286 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5287 else
5288 *pbitsize = GET_MODE_BITSIZE (mode);
5291 if (size_tree != 0)
5293 if (! host_integerp (size_tree, 1))
5294 mode = BLKmode, *pbitsize = -1;
5295 else
5296 *pbitsize = tree_low_cst (size_tree, 1);
5299 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5300 and find the ultimate containing object. */
5301 while (1)
5303 if (TREE_CODE (exp) == BIT_FIELD_REF)
5304 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5305 else if (TREE_CODE (exp) == COMPONENT_REF)
5307 tree field = TREE_OPERAND (exp, 1);
5308 tree this_offset = DECL_FIELD_OFFSET (field);
5310 /* If this field hasn't been filled in yet, don't go
5311 past it. This should only happen when folding expressions
5312 made during type construction. */
5313 if (this_offset == 0)
5314 break;
5315 else if (! TREE_CONSTANT (this_offset)
5316 && contains_placeholder_p (this_offset))
5317 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5319 offset = size_binop (PLUS_EXPR, offset, this_offset);
5320 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5321 DECL_FIELD_BIT_OFFSET (field));
5323 if (! host_integerp (offset, 0))
5324 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5327 else if (TREE_CODE (exp) == ARRAY_REF)
5329 tree index = TREE_OPERAND (exp, 1);
5330 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5331 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5332 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5334 /* We assume all arrays have sizes that are a multiple of a byte.
5335 First subtract the lower bound, if any, in the type of the
5336 index, then convert to sizetype and multiply by the size of the
5337 array element. */
5338 if (low_bound != 0 && ! integer_zerop (low_bound))
5339 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5340 index, low_bound));
5342 /* If the index has a self-referential type, pass it to a
5343 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5344 component to one. */
5345 if (! TREE_CONSTANT (index)
5346 && contains_placeholder_p (index))
5347 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5348 if (! TREE_CONSTANT (unit_size)
5349 && contains_placeholder_p (unit_size))
5350 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5351 TREE_OPERAND (exp, 0));
5353 offset = size_binop (PLUS_EXPR, offset,
5354 size_binop (MULT_EXPR,
5355 convert (sizetype, index),
5356 unit_size));
5359 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5360 && ! ((TREE_CODE (exp) == NOP_EXPR
5361 || TREE_CODE (exp) == CONVERT_EXPR)
5362 && (TYPE_MODE (TREE_TYPE (exp))
5363 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5364 break;
5366 /* If any reference in the chain is volatile, the effect is volatile. */
5367 if (TREE_THIS_VOLATILE (exp))
5368 *pvolatilep = 1;
5370 /* If the offset is non-constant already, then we can't assume any
5371 alignment more than the alignment here. */
5372 if (! TREE_CONSTANT (offset))
5373 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5375 exp = TREE_OPERAND (exp, 0);
5378 if (DECL_P (exp))
5379 alignment = MIN (alignment, DECL_ALIGN (exp));
5380 else if (TREE_TYPE (exp) != 0)
5381 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5383 /* If OFFSET is constant, see if we can return the whole thing as a
5384 constant bit position. Otherwise, split it up. */
5385 if (host_integerp (offset, 0)
5386 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5387 bitsize_unit_node))
5388 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5389 && host_integerp (tem, 0))
5390 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5391 else
5392 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5394 *pmode = mode;
5395 *palignment = alignment;
5396 return exp;
5399 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5401 static enum memory_use_mode
5402 get_memory_usage_from_modifier (modifier)
5403 enum expand_modifier modifier;
5405 switch (modifier)
5407 case EXPAND_NORMAL:
5408 case EXPAND_SUM:
5409 return MEMORY_USE_RO;
5410 break;
5411 case EXPAND_MEMORY_USE_WO:
5412 return MEMORY_USE_WO;
5413 break;
5414 case EXPAND_MEMORY_USE_RW:
5415 return MEMORY_USE_RW;
5416 break;
5417 case EXPAND_MEMORY_USE_DONT:
5418 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5419 MEMORY_USE_DONT, because they are modifiers to a call of
5420 expand_expr in the ADDR_EXPR case of expand_expr. */
5421 case EXPAND_CONST_ADDRESS:
5422 case EXPAND_INITIALIZER:
5423 return MEMORY_USE_DONT;
5424 case EXPAND_MEMORY_USE_BAD:
5425 default:
5426 abort ();
5430 /* Given an rtx VALUE that may contain additions and multiplications, return
5431 an equivalent value that just refers to a register, memory, or constant.
5432 This is done by generating instructions to perform the arithmetic and
5433 returning a pseudo-register containing the value.
5435 The returned value may be a REG, SUBREG, MEM or constant. */
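/* Illustrative example: given a VALUE of the form (plus (reg 100)
   (const_int 4)), force_operand emits the addition and returns a pseudo (or
   TARGET) holding the sum, so the caller is left with a plain register
   operand rather than an arithmetic expression.  */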
5438 force_operand (value, target)
5439 rtx value, target;
5441 register optab binoptab = 0;
5442 /* Use a temporary to force order of execution of calls to
5443 `force_operand'. */
5444 rtx tmp;
5445 register rtx op2;
5446 /* Use subtarget as the target for operand 0 of a binary operation. */
5447 register rtx subtarget = get_subtarget (target);
5449 /* Check for a PIC address load. */
5450 if (flag_pic
5451 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5452 && XEXP (value, 0) == pic_offset_table_rtx
5453 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5454 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5455 || GET_CODE (XEXP (value, 1)) == CONST))
5457 if (!subtarget)
5458 subtarget = gen_reg_rtx (GET_MODE (value));
5459 emit_move_insn (subtarget, value);
5460 return subtarget;
5463 if (GET_CODE (value) == PLUS)
5464 binoptab = add_optab;
5465 else if (GET_CODE (value) == MINUS)
5466 binoptab = sub_optab;
5467 else if (GET_CODE (value) == MULT)
5469 op2 = XEXP (value, 1);
5470 if (!CONSTANT_P (op2)
5471 && !(GET_CODE (op2) == REG && op2 != subtarget))
5472 subtarget = 0;
5473 tmp = force_operand (XEXP (value, 0), subtarget);
5474 return expand_mult (GET_MODE (value), tmp,
5475 force_operand (op2, NULL_RTX),
5476 target, 1);
5479 if (binoptab)
5481 op2 = XEXP (value, 1);
5482 if (!CONSTANT_P (op2)
5483 && !(GET_CODE (op2) == REG && op2 != subtarget))
5484 subtarget = 0;
5485 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5487 binoptab = add_optab;
5488 op2 = negate_rtx (GET_MODE (value), op2);
5491 /* Check for an addition with OP2 a constant integer and our first
5492 operand a PLUS of a virtual register and something else. In that
5493 case, we want to emit the sum of the virtual register and the
5494 constant first and then add the other value. This allows virtual
5495 register instantiation to simply modify the constant rather than
5496 creating another one around this addition. */
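/* For instance, when VALUE has the shape ((virtual-stack-vars + X) + 8),
   the sum virtual-stack-vars + 8 is formed first and X is added afterwards,
   so instantiating the virtual register only needs to adjust the constant
   instead of emitting another addition.  */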
5497 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5498 && GET_CODE (XEXP (value, 0)) == PLUS
5499 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5500 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5501 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5503 rtx temp = expand_binop (GET_MODE (value), binoptab,
5504 XEXP (XEXP (value, 0), 0), op2,
5505 subtarget, 0, OPTAB_LIB_WIDEN);
5506 return expand_binop (GET_MODE (value), binoptab, temp,
5507 force_operand (XEXP (XEXP (value, 0), 1), 0),
5508 target, 0, OPTAB_LIB_WIDEN);
5511 tmp = force_operand (XEXP (value, 0), subtarget);
5512 return expand_binop (GET_MODE (value), binoptab, tmp,
5513 force_operand (op2, NULL_RTX),
5514 target, 0, OPTAB_LIB_WIDEN);
5515 /* We give UNSIGNEDP = 0 to expand_binop
5516 because the only operations we are expanding here are signed ones. */
5518 return value;
5521 /* Subroutine of expand_expr:
5522 save the non-copied parts (LIST) of an expr (LHS), and return a list
5523 which can restore these values to their previous values,
5524 should something modify their storage. */
5526 static tree
5527 save_noncopied_parts (lhs, list)
5528 tree lhs;
5529 tree list;
5531 tree tail;
5532 tree parts = 0;
5534 for (tail = list; tail; tail = TREE_CHAIN (tail))
5535 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5536 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5537 else
5539 tree part = TREE_VALUE (tail);
5540 tree part_type = TREE_TYPE (part);
5541 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5542 rtx target
5543 = assign_temp (build_qualified_type (part_type,
5544 (TYPE_QUALS (part_type)
5545 | TYPE_QUAL_CONST)),
5546 0, 1, 1);
5548 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5549 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5550 parts = tree_cons (to_be_saved,
5551 build (RTL_EXPR, part_type, NULL_TREE,
5552 (tree) target),
5553 parts);
5554 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5556 return parts;
5559 /* Subroutine of expand_expr:
5560 record the non-copied parts (LIST) of an expr (LHS), and return a list
5561 which specifies the initial values of these parts. */
5563 static tree
5564 init_noncopied_parts (lhs, list)
5565 tree lhs;
5566 tree list;
5568 tree tail;
5569 tree parts = 0;
5571 for (tail = list; tail; tail = TREE_CHAIN (tail))
5572 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5573 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5574 else if (TREE_PURPOSE (tail))
5576 tree part = TREE_VALUE (tail);
5577 tree part_type = TREE_TYPE (part);
5578 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5579 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5581 return parts;
5584 /* Subroutine of expand_expr: return nonzero iff there is no way that
5585 EXP can reference X, which is being modified. TOP_P is nonzero if this
5586 call is going to be used to determine whether we need a temporary
5587 for EXP, as opposed to a recursive call to this function.
5589 It is always safe for this routine to return zero since it merely
5590 searches for optimization opportunities. */
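/* For example, when expanding B + C into a suggested target X, operand B
   may be computed directly into X only if safe_from_p reports that C
   cannot reference X; if C might read X (say C contains *P and P could
   point at X), we return zero and the caller uses a fresh temporary.  */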
5592 int
5593 safe_from_p (x, exp, top_p)
5594 rtx x;
5595 tree exp;
5596 int top_p;
5598 rtx exp_rtl = 0;
5599 int i, nops;
5600 static tree save_expr_list;
5602 if (x == 0
5603 /* If EXP has varying size, we MUST use a target since we currently
5604 have no way of allocating temporaries of variable size
5605 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5606 So we assume here that something at a higher level has prevented a
5607 clash. This is somewhat bogus, but the best we can do. Only
5608 do this when X is BLKmode and when we are at the top level. */
5609 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5610 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5611 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5612 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5613 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5614 != INTEGER_CST)
5615 && GET_MODE (x) == BLKmode)
5616 /* If X is in the outgoing argument area, it is always safe. */
5617 || (GET_CODE (x) == MEM
5618 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5619 || (GET_CODE (XEXP (x, 0)) == PLUS
5620 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5621 return 1;
5623 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5624 find the underlying pseudo. */
5625 if (GET_CODE (x) == SUBREG)
5627 x = SUBREG_REG (x);
5628 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5629 return 0;
5632 /* A SAVE_EXPR might appear many times in the expression passed to the
5633 top-level safe_from_p call, and if it has a complex subexpression,
5634 examining it multiple times could result in a combinatorial explosion.
5635 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5636 with optimization took about 28 minutes to compile -- even though it was
5637 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5638 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5639 we have processed. Note that the only test of top_p was above. */
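  /* A sketch of the blow-up this avoids: if S is a SAVE_EXPR with a large
     body, an expression such as ((S + S) + (S + S)) + ... would otherwise
     rescan S's body once per reference, doubling the work at each level of
     nesting.  With TREE_PRIVATE set on the first visit, later references
     to S return immediately.  */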
5641 if (top_p)
5643 int rtn;
5644 tree t;
5646 save_expr_list = 0;
5648 rtn = safe_from_p (x, exp, 0);
5650 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5651 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5653 return rtn;
5656 /* Now look at our tree code and possibly recurse. */
5657 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5659 case 'd':
5660 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5661 break;
5663 case 'c':
5664 return 1;
5666 case 'x':
5667 if (TREE_CODE (exp) == TREE_LIST)
5668 return ((TREE_VALUE (exp) == 0
5669 || safe_from_p (x, TREE_VALUE (exp), 0))
5670 && (TREE_CHAIN (exp) == 0
5671 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5672 else if (TREE_CODE (exp) == ERROR_MARK)
5673 return 1; /* An already-visited SAVE_EXPR? */
5674 else
5675 return 0;
5677 case '1':
5678 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5680 case '2':
5681 case '<':
5682 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5683 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5685 case 'e':
5686 case 'r':
5687 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5688 the expression. If it is set, we conflict iff we are that rtx or
5689 both are in memory. Otherwise, we check all operands of the
5690 expression recursively. */
5692 switch (TREE_CODE (exp))
5694 case ADDR_EXPR:
5695 return (staticp (TREE_OPERAND (exp, 0))
5696 || TREE_STATIC (exp)
5697 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5699 case INDIRECT_REF:
5700 if (GET_CODE (x) == MEM
5701 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5702 get_alias_set (exp)))
5703 return 0;
5704 break;
5706 case CALL_EXPR:
5707 /* Assume that the call will clobber all hard registers and
5708 all of memory. */
5709 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5710 || GET_CODE (x) == MEM)
5711 return 0;
5712 break;
5714 case RTL_EXPR:
5715 /* If a sequence exists, we would have to scan every instruction
5716 in the sequence to see if it was safe. This is probably not
5717 worthwhile. */
5718 if (RTL_EXPR_SEQUENCE (exp))
5719 return 0;
5721 exp_rtl = RTL_EXPR_RTL (exp);
5722 break;
5724 case WITH_CLEANUP_EXPR:
5725 exp_rtl = RTL_EXPR_RTL (exp);
5726 break;
5728 case CLEANUP_POINT_EXPR:
5729 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5731 case SAVE_EXPR:
5732 exp_rtl = SAVE_EXPR_RTL (exp);
5733 if (exp_rtl)
5734 break;
5736 /* If we've already scanned this, don't do it again. Otherwise,
5737 show we've scanned it, and record it so the flag can be cleared
5738 if we go on. */
5739 if (TREE_PRIVATE (exp))
5740 return 1;
5742 TREE_PRIVATE (exp) = 1;
5743 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5745 TREE_PRIVATE (exp) = 0;
5746 return 0;
5749 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5750 return 1;
5752 case BIND_EXPR:
5753 /* The only operand we look at is operand 1. The rest aren't
5754 part of the expression. */
5755 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5757 case METHOD_CALL_EXPR:
5758 /* This takes an rtx argument, but shouldn't appear here. */
5759 abort ();
5761 default:
5762 break;
5765 /* If we have an rtx, we do not need to scan our operands. */
5766 if (exp_rtl)
5767 break;
5769 nops = first_rtl_op (TREE_CODE (exp));
5770 for (i = 0; i < nops; i++)
5771 if (TREE_OPERAND (exp, i) != 0
5772 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5773 return 0;
5775 /* If this is a language-specific tree code, it may require
5776 special handling. */
5777 if ((unsigned int) TREE_CODE (exp)
5778 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5779 && lang_safe_from_p
5780 && !(*lang_safe_from_p) (x, exp))
5781 return 0;
5784 /* If we have an rtl, find any enclosed object. Then see if we conflict
5785 with it. */
5786 if (exp_rtl)
5788 if (GET_CODE (exp_rtl) == SUBREG)
5790 exp_rtl = SUBREG_REG (exp_rtl);
5791 if (GET_CODE (exp_rtl) == REG
5792 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5793 return 0;
5796 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5797 are memory and they conflict. */
5798 return ! (rtx_equal_p (x, exp_rtl)
5799 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5800 && true_dependence (exp_rtl, GET_MODE (x), x,
5801 rtx_addr_varies_p)));
5804 /* If we reach here, it is safe. */
5805 return 1;
5808 /* Subroutine of expand_expr: return nonzero iff EXP is an
5809 expression whose type is statically determinable. */
5811 static int
5812 fixed_type_p (exp)
5813 tree exp;
5815 if (TREE_CODE (exp) == PARM_DECL
5816 || TREE_CODE (exp) == VAR_DECL
5817 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5818 || TREE_CODE (exp) == COMPONENT_REF
5819 || TREE_CODE (exp) == ARRAY_REF)
5820 return 1;
5821 return 0;
5824 /* Subroutine of expand_expr: return rtx if EXP is a
5825 variable or parameter; else return 0. */
5827 static rtx
5828 var_rtx (exp)
5829 tree exp;
5831 STRIP_NOPS (exp);
5832 switch (TREE_CODE (exp))
5834 case PARM_DECL:
5835 case VAR_DECL:
5836 return DECL_RTL (exp);
5837 default:
5838 return 0;
5842 #ifdef MAX_INTEGER_COMPUTATION_MODE
5844 void
5845 check_max_integer_computation_mode (exp)
5846 tree exp;
5848 enum tree_code code;
5849 enum machine_mode mode;
5851 /* Strip any NOPs that don't change the mode. */
5852 STRIP_NOPS (exp);
5853 code = TREE_CODE (exp);
5855 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5856 if (code == NOP_EXPR
5857 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5858 return;
5860 /* First check the type of the overall operation. We need only look at
5861 unary, binary and relational operations. */
5862 if (TREE_CODE_CLASS (code) == '1'
5863 || TREE_CODE_CLASS (code) == '2'
5864 || TREE_CODE_CLASS (code) == '<')
5866 mode = TYPE_MODE (TREE_TYPE (exp));
5867 if (GET_MODE_CLASS (mode) == MODE_INT
5868 && mode > MAX_INTEGER_COMPUTATION_MODE)
5869 internal_error ("unsupported wide integer operation");
5872 /* Check operand of a unary op. */
5873 if (TREE_CODE_CLASS (code) == '1')
5875 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5876 if (GET_MODE_CLASS (mode) == MODE_INT
5877 && mode > MAX_INTEGER_COMPUTATION_MODE)
5878 internal_error ("unsupported wide integer operation");
5881 /* Check operands of a binary/comparison op. */
5882 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5884 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5885 if (GET_MODE_CLASS (mode) == MODE_INT
5886 && mode > MAX_INTEGER_COMPUTATION_MODE)
5887 internal_error ("unsupported wide integer operation");
5889 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5890 if (GET_MODE_CLASS (mode) == MODE_INT
5891 && mode > MAX_INTEGER_COMPUTATION_MODE)
5892 internal_error ("unsupported wide integer operation");
5895 #endif
5897 /* expand_expr: generate code for computing expression EXP.
5898 An rtx for the computed value is returned. The value is never null.
5899 In the case of a void EXP, const0_rtx is returned.
5901 The value may be stored in TARGET if TARGET is nonzero.
5902 TARGET is just a suggestion; callers must assume that
5903 the rtx returned may not be the same as TARGET.
5905 If TARGET is CONST0_RTX, it means that the value will be ignored.
5907 If TMODE is not VOIDmode, it suggests generating the
5908 result in mode TMODE. But this is done only when convenient.
5909 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5910 TMODE is just a suggestion; callers must assume that
5911 the rtx returned may not have mode TMODE.
5913 Note that TARGET may have neither TMODE nor MODE. In that case, it
5914 probably will not be used.
5916 If MODIFIER is EXPAND_SUM then when EXP is an addition
5917 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5918 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5919 products as above, or REG or MEM, or constant.
5920 Ordinarily in such cases we would output mul or add instructions
5921 and then return a pseudo reg containing the sum.
5923 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5924 it also marks a label as absolutely required (it can't be dead).
5925 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5926 This is used for outputting expressions used in initializers.
5928 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5929 with a constant address even if that address is not normally legitimate.
5930 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
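/* As a rough illustration (hypothetical pseudos): expanding an address
   computation such as A + I*4 + 8 under EXPAND_SUM may come back as the
   unreduced form

	(plus:SI (plus:SI (reg:SI 58) (mult:SI (reg:SI 59) (const_int 4)))
		 (const_int 8))

   which a caller like memory_address can fold into an addressing mode,
   whereas EXPAND_NORMAL would emit the arithmetic and return one pseudo.  */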
5932 rtx
5933 expand_expr (exp, target, tmode, modifier)
5934 register tree exp;
5935 rtx target;
5936 enum machine_mode tmode;
5937 enum expand_modifier modifier;
5939 register rtx op0, op1, temp;
5940 tree type = TREE_TYPE (exp);
5941 int unsignedp = TREE_UNSIGNED (type);
5942 register enum machine_mode mode;
5943 register enum tree_code code = TREE_CODE (exp);
5944 optab this_optab;
5945 rtx subtarget, original_target;
5946 int ignore;
5947 tree context;
5948 /* Used by check-memory-usage to make modifier read only. */
5949 enum expand_modifier ro_modifier;
5951 /* Handle ERROR_MARK before anybody tries to access its type. */
5952 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5954 op0 = CONST0_RTX (tmode);
5955 if (op0 != 0)
5956 return op0;
5957 return const0_rtx;
5960 mode = TYPE_MODE (type);
5961 /* Use subtarget as the target for operand 0 of a binary operation. */
5962 subtarget = get_subtarget (target);
5963 original_target = target;
5964 ignore = (target == const0_rtx
5965 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5966 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5967 || code == COND_EXPR)
5968 && TREE_CODE (type) == VOID_TYPE));
5970 /* Make a read-only version of the modifier. */
5971 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5972 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5973 ro_modifier = modifier;
5974 else
5975 ro_modifier = EXPAND_NORMAL;
5977 /* If we are going to ignore this result, we need only do something
5978 if there is a side-effect somewhere in the expression. If there
5979 is, short-circuit the most common cases here. Note that we must
5980 not call expand_expr with anything but const0_rtx in case this
5981 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
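  /* For instance, (void) (a + b) with simple operands has no side effects
     and simply yields const0_rtx with no code emitted, while a discarded
     volatile reference (say *VP, with VP a hypothetical pointer to volatile
     storage) still has its load emitted below so the access is not lost.  */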
5983 if (ignore)
5985 if (! TREE_SIDE_EFFECTS (exp))
5986 return const0_rtx;
5988 /* Ensure we reference a volatile object even if the value is ignored, but
5989 don't do this if all we are doing is taking its address. */
5990 if (TREE_THIS_VOLATILE (exp)
5991 && TREE_CODE (exp) != FUNCTION_DECL
5992 && mode != VOIDmode && mode != BLKmode
5993 && modifier != EXPAND_CONST_ADDRESS)
5995 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5996 if (GET_CODE (temp) == MEM)
5997 temp = copy_to_reg (temp);
5998 return const0_rtx;
6001 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6002 || code == INDIRECT_REF || code == BUFFER_REF)
6003 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6004 VOIDmode, ro_modifier);
6005 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6006 || code == ARRAY_REF)
6008 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6009 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6010 return const0_rtx;
6012 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6013 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6014 /* If the second operand has no side effects, just evaluate
6015 the first. */
6016 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6017 VOIDmode, ro_modifier);
6018 else if (code == BIT_FIELD_REF)
6020 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6021 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6022 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6023 return const0_rtx;
6026 target = 0;
6029 #ifdef MAX_INTEGER_COMPUTATION_MODE
6030 /* Only check stuff here if the mode we want is different from the mode
6031 of the expression; if it's the same, check_max_integer_computation_mode
6032 will handle it. Do we really need to check this stuff at all? */
6034 if (target
6035 && GET_MODE (target) != mode
6036 && TREE_CODE (exp) != INTEGER_CST
6037 && TREE_CODE (exp) != PARM_DECL
6038 && TREE_CODE (exp) != ARRAY_REF
6039 && TREE_CODE (exp) != COMPONENT_REF
6040 && TREE_CODE (exp) != BIT_FIELD_REF
6041 && TREE_CODE (exp) != INDIRECT_REF
6042 && TREE_CODE (exp) != CALL_EXPR
6043 && TREE_CODE (exp) != VAR_DECL
6044 && TREE_CODE (exp) != RTL_EXPR)
6046 enum machine_mode mode = GET_MODE (target);
6048 if (GET_MODE_CLASS (mode) == MODE_INT
6049 && mode > MAX_INTEGER_COMPUTATION_MODE)
6050 internal_error ("unsupported wide integer operation");
6053 if (tmode != mode
6054 && TREE_CODE (exp) != INTEGER_CST
6055 && TREE_CODE (exp) != PARM_DECL
6056 && TREE_CODE (exp) != ARRAY_REF
6057 && TREE_CODE (exp) != COMPONENT_REF
6058 && TREE_CODE (exp) != BIT_FIELD_REF
6059 && TREE_CODE (exp) != INDIRECT_REF
6060 && TREE_CODE (exp) != VAR_DECL
6061 && TREE_CODE (exp) != CALL_EXPR
6062 && TREE_CODE (exp) != RTL_EXPR
6063 && GET_MODE_CLASS (tmode) == MODE_INT
6064 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6065 internal_error ("unsupported wide integer operation");
6067 check_max_integer_computation_mode (exp);
6068 #endif
6070 /* If we will do cse, generate all results into pseudo registers
6071 since 1) that allows cse to find more things
6072 and 2) otherwise cse could produce an insn the machine
6073 cannot support. */
6075 if (! cse_not_expected && mode != BLKmode && target
6076 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6077 target = subtarget;
6079 switch (code)
6081 case LABEL_DECL:
6083 tree function = decl_function_context (exp);
6084 /* Handle using a label in a containing function. */
6085 if (function != current_function_decl
6086 && function != inline_function_decl && function != 0)
6088 struct function *p = find_function_data (function);
6089 p->expr->x_forced_labels
6090 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6091 p->expr->x_forced_labels);
6093 else
6095 if (modifier == EXPAND_INITIALIZER)
6096 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6097 label_rtx (exp),
6098 forced_labels);
6101 temp = gen_rtx_MEM (FUNCTION_MODE,
6102 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6103 if (function != current_function_decl
6104 && function != inline_function_decl && function != 0)
6105 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6106 return temp;
6109 case PARM_DECL:
6110 if (DECL_RTL (exp) == 0)
6112 error_with_decl (exp, "prior parameter's size depends on `%s'");
6113 return CONST0_RTX (mode);
6116 /* ... fall through ... */
6118 case VAR_DECL:
6119 /* If a static var's type was incomplete when the decl was written,
6120 but the type is complete now, lay out the decl now. */
6121 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6122 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6124 layout_decl (exp, 0);
6125 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6128 /* Although static-storage variables start off initialized, according to
6129 ANSI C, a memcpy could overwrite them with uninitialized values. So
6130 we check them too. This also lets us check for read-only variables
6131 accessed via a non-const declaration, in case it won't be detected
6132 any other way (e.g., in an embedded system or OS kernel without
6133 memory protection).
6135 Aggregates are not checked here; they're handled elsewhere. */
6136 if (cfun && current_function_check_memory_usage
6137 && code == VAR_DECL
6138 && GET_CODE (DECL_RTL (exp)) == MEM
6139 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6141 enum memory_use_mode memory_usage;
6142 memory_usage = get_memory_usage_from_modifier (modifier);
6144 in_check_memory_usage = 1;
6145 if (memory_usage != MEMORY_USE_DONT)
6146 emit_library_call (chkr_check_addr_libfunc,
6147 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6148 XEXP (DECL_RTL (exp), 0), Pmode,
6149 GEN_INT (int_size_in_bytes (type)),
6150 TYPE_MODE (sizetype),
6151 GEN_INT (memory_usage),
6152 TYPE_MODE (integer_type_node));
6153 in_check_memory_usage = 0;
6156 /* ... fall through ... */
6158 case FUNCTION_DECL:
6159 case RESULT_DECL:
6160 if (DECL_RTL (exp) == 0)
6161 abort ();
6163 /* Ensure the variable is marked as used even if it doesn't go through
6164 a parser. If it hasn't been used yet, write out an external
6165 definition. */
6166 if (! TREE_USED (exp))
6168 assemble_external (exp);
6169 TREE_USED (exp) = 1;
6172 /* Show we haven't gotten RTL for this yet. */
6173 temp = 0;
6175 /* Handle variables inherited from containing functions. */
6176 context = decl_function_context (exp);
6178 /* We treat inline_function_decl as an alias for the current function
6179 because that is the inline function whose vars, types, etc.
6180 are being merged into the current function.
6181 See expand_inline_function. */
6183 if (context != 0 && context != current_function_decl
6184 && context != inline_function_decl
6185 /* If var is static, we don't need a static chain to access it. */
6186 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6187 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6189 rtx addr;
6191 /* Mark as non-local and addressable. */
6192 DECL_NONLOCAL (exp) = 1;
6193 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6194 abort ();
6195 mark_addressable (exp);
6196 if (GET_CODE (DECL_RTL (exp)) != MEM)
6197 abort ();
6198 addr = XEXP (DECL_RTL (exp), 0);
6199 if (GET_CODE (addr) == MEM)
6200 addr = change_address (addr, Pmode,
6201 fix_lexical_addr (XEXP (addr, 0), exp));
6202 else
6203 addr = fix_lexical_addr (addr, exp);
6205 temp = change_address (DECL_RTL (exp), mode, addr);
6208 /* This is the case of an array whose size is to be determined
6209 from its initializer, while the initializer is still being parsed.
6210 See expand_decl. */
6212 else if (GET_CODE (DECL_RTL (exp)) == MEM
6213 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6214 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6215 XEXP (DECL_RTL (exp), 0));
6217 /* If DECL_RTL is memory, we are in the normal case and either
6218 the address is not valid or it is not a register and -fforce-addr
6219 is specified, get the address into a register. */
6221 else if (GET_CODE (DECL_RTL (exp)) == MEM
6222 && modifier != EXPAND_CONST_ADDRESS
6223 && modifier != EXPAND_SUM
6224 && modifier != EXPAND_INITIALIZER
6225 && (! memory_address_p (DECL_MODE (exp),
6226 XEXP (DECL_RTL (exp), 0))
6227 || (flag_force_addr
6228 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6229 temp = change_address (DECL_RTL (exp), VOIDmode,
6230 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6232 /* If we got something, return it. But first, set the alignment
6233 if the address is a register. */
6234 if (temp != 0)
6236 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6237 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6239 return temp;
6242 /* If the mode of DECL_RTL does not match that of the decl, it
6243 must be a promoted value. We return a SUBREG of the wanted mode,
6244 but mark it so that we know that it was already extended. */
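      /* Illustration: on a target whose PROMOTE_MODE widens HImode locals
	 to SImode, a `short' variable lives in an SImode pseudo, say
	 (reg:SI 60).  Asking for it in HImode then yields
	 (subreg:HI (reg:SI 60) 0) with SUBREG_PROMOTED_VAR_P set, so later
	 code knows the upper bits are already sign- or zero-extended.  */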
6246 if (GET_CODE (DECL_RTL (exp)) == REG
6247 && GET_MODE (DECL_RTL (exp)) != mode)
6249 /* Get the signedness used for this variable. Ensure we get the
6250 same mode we got when the variable was declared. */
6251 if (GET_MODE (DECL_RTL (exp))
6252 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6253 abort ();
6255 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6256 SUBREG_PROMOTED_VAR_P (temp) = 1;
6257 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6258 return temp;
6261 return DECL_RTL (exp);
6263 case INTEGER_CST:
6264 return immed_double_const (TREE_INT_CST_LOW (exp),
6265 TREE_INT_CST_HIGH (exp), mode);
6267 case CONST_DECL:
6268 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6269 EXPAND_MEMORY_USE_BAD);
6271 case REAL_CST:
6272 /* If optimized, generate immediate CONST_DOUBLE
6273 which will be turned into memory by reload if necessary.
6275 We used to force a register so that loop.c could see it. But
6276 this does not allow gen_* patterns to perform optimizations with
6277 the constants. It also produces two insns in cases like "x = 1.0;".
6278 On most machines, floating-point constants are not permitted in
6279 many insns, so we'd end up copying it to a register in any case.
6281 Now, we do the copying in expand_binop, if appropriate. */
6282 return immed_real_const (exp);
6284 case COMPLEX_CST:
6285 case STRING_CST:
6286 if (! TREE_CST_RTL (exp))
6287 output_constant_def (exp, 1);
6289 /* TREE_CST_RTL probably contains a constant address.
6290 On RISC machines where a constant address isn't valid,
6291 make some insns to get that address into a register. */
6292 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6293 && modifier != EXPAND_CONST_ADDRESS
6294 && modifier != EXPAND_INITIALIZER
6295 && modifier != EXPAND_SUM
6296 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6297 || (flag_force_addr
6298 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6299 return change_address (TREE_CST_RTL (exp), VOIDmode,
6300 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6301 return TREE_CST_RTL (exp);
6303 case EXPR_WITH_FILE_LOCATION:
6305 rtx to_return;
6306 const char *saved_input_filename = input_filename;
6307 int saved_lineno = lineno;
6308 input_filename = EXPR_WFL_FILENAME (exp);
6309 lineno = EXPR_WFL_LINENO (exp);
6310 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6311 emit_line_note (input_filename, lineno);
6312 /* Possibly avoid switching back and forth here. */
6313 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6314 input_filename = saved_input_filename;
6315 lineno = saved_lineno;
6316 return to_return;
6319 case SAVE_EXPR:
6320 context = decl_function_context (exp);
6322 /* If this SAVE_EXPR was at global context, assume we are an
6323 initialization function and move it into our context. */
6324 if (context == 0)
6325 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6327 /* We treat inline_function_decl as an alias for the current function
6328 because that is the inline function whose vars, types, etc.
6329 are being merged into the current function.
6330 See expand_inline_function. */
6331 if (context == current_function_decl || context == inline_function_decl)
6332 context = 0;
6334 /* If this is non-local, handle it. */
6335 if (context)
6337 /* The following call just exists to abort if the context is
6338 not of a containing function. */
6339 find_function_data (context);
6341 temp = SAVE_EXPR_RTL (exp);
6342 if (temp && GET_CODE (temp) == REG)
6344 put_var_into_stack (exp);
6345 temp = SAVE_EXPR_RTL (exp);
6347 if (temp == 0 || GET_CODE (temp) != MEM)
6348 abort ();
6349 return change_address (temp, mode,
6350 fix_lexical_addr (XEXP (temp, 0), exp));
6352 if (SAVE_EXPR_RTL (exp) == 0)
6354 if (mode == VOIDmode)
6355 temp = const0_rtx;
6356 else
6357 temp = assign_temp (build_qualified_type (type,
6358 (TYPE_QUALS (type)
6359 | TYPE_QUAL_CONST)),
6360 3, 0, 0);
6362 SAVE_EXPR_RTL (exp) = temp;
6363 if (!optimize && GET_CODE (temp) == REG)
6364 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6365 save_expr_regs);
6367 /* If the mode of TEMP does not match that of the expression, it
6368 must be a promoted value. We pass store_expr a SUBREG of the
6369 wanted mode but mark it so that we know that it was already
6370 extended. Note that `unsignedp' was modified above in
6371 this case. */
6373 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6375 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6376 SUBREG_PROMOTED_VAR_P (temp) = 1;
6377 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6380 if (temp == const0_rtx)
6381 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6382 EXPAND_MEMORY_USE_BAD);
6383 else
6384 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6386 TREE_USED (exp) = 1;
6389 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6390 must be a promoted value. We return a SUBREG of the wanted mode,
6391 but mark it so that we know that it was already extended. */
6393 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6394 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6396 /* Compute the signedness and make the proper SUBREG. */
6397 promote_mode (type, mode, &unsignedp, 0);
6398 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6399 SUBREG_PROMOTED_VAR_P (temp) = 1;
6400 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6401 return temp;
6404 return SAVE_EXPR_RTL (exp);
6406 case UNSAVE_EXPR:
6408 rtx temp;
6409 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6410 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6411 return temp;
6414 case PLACEHOLDER_EXPR:
6416 tree placeholder_expr;
6418 /* If there is an object on the head of the placeholder list,
6419 see if some object in it is of type TYPE or a pointer to it. For
6420 further information, see tree.def. */
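	/* Sketch of the intent: for a self-referential type (e.g. an Ada
	   record whose field sizes depend on the record object itself),
	   those sizes are written in terms of a PLACEHOLDER_EXPR.  A
	   surrounding WITH_RECORD_EXPR pushes the actual object onto
	   placeholder_list, and the loops below substitute that object, or
	   a dereference of a pointer to it, for the placeholder.  */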
6421 for (placeholder_expr = placeholder_list;
6422 placeholder_expr != 0;
6423 placeholder_expr = TREE_CHAIN (placeholder_expr))
6425 tree need_type = TYPE_MAIN_VARIANT (type);
6426 tree object = 0;
6427 tree old_list = placeholder_list;
6428 tree elt;
6430 /* Find the outermost reference that is of the type we want.
6431 If none, see if any object has a type that is a pointer to
6432 the type we want. */
6433 for (elt = TREE_PURPOSE (placeholder_expr);
6434 elt != 0 && object == 0;
6436 = ((TREE_CODE (elt) == COMPOUND_EXPR
6437 || TREE_CODE (elt) == COND_EXPR)
6438 ? TREE_OPERAND (elt, 1)
6439 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6440 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6441 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6442 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6443 ? TREE_OPERAND (elt, 0) : 0))
6444 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6445 object = elt;
6447 for (elt = TREE_PURPOSE (placeholder_expr);
6448 elt != 0 && object == 0;
6450 = ((TREE_CODE (elt) == COMPOUND_EXPR
6451 || TREE_CODE (elt) == COND_EXPR)
6452 ? TREE_OPERAND (elt, 1)
6453 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6454 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6455 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6456 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6457 ? TREE_OPERAND (elt, 0) : 0))
6458 if (POINTER_TYPE_P (TREE_TYPE (elt))
6459 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6460 == need_type))
6461 object = build1 (INDIRECT_REF, need_type, elt);
6463 if (object != 0)
6465 /* Expand this object skipping the list entries before
6466 it was found in case it is also a PLACEHOLDER_EXPR.
6467 In that case, we want to translate it using subsequent
6468 entries. */
6469 placeholder_list = TREE_CHAIN (placeholder_expr);
6470 temp = expand_expr (object, original_target, tmode,
6471 ro_modifier);
6472 placeholder_list = old_list;
6473 return temp;
6478 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6479 abort ();
6481 case WITH_RECORD_EXPR:
6482 /* Put the object on the placeholder list, expand our first operand,
6483 and pop the list. */
6484 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6485 placeholder_list);
6486 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6487 tmode, ro_modifier);
6488 placeholder_list = TREE_CHAIN (placeholder_list);
6489 return target;
6491 case GOTO_EXPR:
6492 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6493 expand_goto (TREE_OPERAND (exp, 0));
6494 else
6495 expand_computed_goto (TREE_OPERAND (exp, 0));
6496 return const0_rtx;
6498 case EXIT_EXPR:
6499 expand_exit_loop_if_false (NULL_PTR,
6500 invert_truthvalue (TREE_OPERAND (exp, 0)));
6501 return const0_rtx;
6503 case LABELED_BLOCK_EXPR:
6504 if (LABELED_BLOCK_BODY (exp))
6505 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6506 /* Should perhaps use expand_label, but this is simpler and safer. */
6507 do_pending_stack_adjust ();
6508 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6509 return const0_rtx;
6511 case EXIT_BLOCK_EXPR:
6512 if (EXIT_BLOCK_RETURN (exp))
6513 sorry ("returned value in block_exit_expr");
6514 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6515 return const0_rtx;
6517 case LOOP_EXPR:
6518 push_temp_slots ();
6519 expand_start_loop (1);
6520 expand_expr_stmt (TREE_OPERAND (exp, 0));
6521 expand_end_loop ();
6522 pop_temp_slots ();
6524 return const0_rtx;
6526 case BIND_EXPR:
6528 tree vars = TREE_OPERAND (exp, 0);
6529 int vars_need_expansion = 0;
6531 /* Need to open a binding contour here because
6532 if there are any cleanups they must be contained here. */
6533 expand_start_bindings (2);
6535 /* Mark the corresponding BLOCK for output in its proper place. */
6536 if (TREE_OPERAND (exp, 2) != 0
6537 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6538 insert_block (TREE_OPERAND (exp, 2));
6540 /* If VARS have not yet been expanded, expand them now. */
6541 while (vars)
6543 if (!DECL_RTL_SET_P (vars))
6545 vars_need_expansion = 1;
6546 expand_decl (vars);
6548 expand_decl_init (vars);
6549 vars = TREE_CHAIN (vars);
6552 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6554 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6556 return temp;
6559 case RTL_EXPR:
6560 if (RTL_EXPR_SEQUENCE (exp))
6562 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6563 abort ();
6564 emit_insns (RTL_EXPR_SEQUENCE (exp));
6565 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6567 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6568 free_temps_for_rtl_expr (exp);
6569 return RTL_EXPR_RTL (exp);
6571 case CONSTRUCTOR:
6572 /* If we don't need the result, just ensure we evaluate any
6573 subexpressions. */
6574 if (ignore)
6576 tree elt;
6577 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6578 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6579 EXPAND_MEMORY_USE_BAD);
6580 return const0_rtx;
6583 /* All elts simple constants => refer to a constant in memory. But
6584 if this is a non-BLKmode mode, let it store a field at a time
6585 since that should make a CONST_INT or CONST_DOUBLE when we
6586 fold. Likewise, if we have a target we can use, it is best to
6587 store directly into the target unless the type is large enough
6588 that memcpy will be used. If we are making an initializer and
6589 all operands are constant, put it in memory as well. */
6590 else if ((TREE_STATIC (exp)
6591 && ((mode == BLKmode
6592 && ! (target != 0 && safe_from_p (target, exp, 1)))
6593 || TREE_ADDRESSABLE (exp)
6594 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6595 && (! MOVE_BY_PIECES_P
6596 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6597 TYPE_ALIGN (type)))
6598 && ! mostly_zeros_p (exp))))
6599 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6601 rtx constructor = output_constant_def (exp, 1);
6603 if (modifier != EXPAND_CONST_ADDRESS
6604 && modifier != EXPAND_INITIALIZER
6605 && modifier != EXPAND_SUM
6606 && (! memory_address_p (GET_MODE (constructor),
6607 XEXP (constructor, 0))
6608 || (flag_force_addr
6609 && GET_CODE (XEXP (constructor, 0)) != REG)))
6610 constructor = change_address (constructor, VOIDmode,
6611 XEXP (constructor, 0));
6612 return constructor;
6614 else
6616 /* Handle calls that pass values in multiple non-contiguous
6617 locations. The Irix 6 ABI has examples of this. */
6618 if (target == 0 || ! safe_from_p (target, exp, 1)
6619 || GET_CODE (target) == PARALLEL)
6620 target
6621 = assign_temp (build_qualified_type (type,
6622 (TYPE_QUALS (type)
6623 | (TREE_READONLY (exp)
6624 * TYPE_QUAL_CONST))),
6625 TREE_ADDRESSABLE (exp), 1, 1);
6627 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6628 int_size_in_bytes (TREE_TYPE (exp)));
6629 return target;
6632 case INDIRECT_REF:
6634 tree exp1 = TREE_OPERAND (exp, 0);
6635 tree index;
6636 tree string = string_constant (exp1, &index);
6638 /* Try to optimize reads from const strings. */
6639 if (string
6640 && TREE_CODE (string) == STRING_CST
6641 && TREE_CODE (index) == INTEGER_CST
6642 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6643 && GET_MODE_CLASS (mode) == MODE_INT
6644 && GET_MODE_SIZE (mode) == 1
6645 && modifier != EXPAND_MEMORY_USE_WO)
6646 return
6647 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6649 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6650 op0 = memory_address (mode, op0);
6652 if (cfun && current_function_check_memory_usage
6653 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6655 enum memory_use_mode memory_usage;
6656 memory_usage = get_memory_usage_from_modifier (modifier);
6658 if (memory_usage != MEMORY_USE_DONT)
6660 in_check_memory_usage = 1;
6661 emit_library_call (chkr_check_addr_libfunc,
6662 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6663 Pmode, GEN_INT (int_size_in_bytes (type)),
6664 TYPE_MODE (sizetype),
6665 GEN_INT (memory_usage),
6666 TYPE_MODE (integer_type_node));
6667 in_check_memory_usage = 0;
6671 temp = gen_rtx_MEM (mode, op0);
6672 set_mem_attributes (temp, exp, 0);
6674 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6675 here, because, in C and C++, the fact that a location is accessed
6676 through a pointer to const does not mean that the value there can
6677 never change. Languages where it can never change should
6678 also set TREE_STATIC. */
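      /* E.g. with a hypothetical `const int *p', the object *p may still be
	 modified through some other, non-const lvalue, so TREE_READONLY
	 alone cannot justify RTX_UNCHANGING_P here.  */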
6679 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6681 /* If we are writing to this object and its type is a record with
6682 readonly fields, we must mark it as readonly so it will
6683 conflict with readonly references to those fields. */
6684 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6685 RTX_UNCHANGING_P (temp) = 1;
6687 return temp;
6690 case ARRAY_REF:
6691 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6692 abort ();
6695 tree array = TREE_OPERAND (exp, 0);
6696 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6697 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6698 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6699 HOST_WIDE_INT i;
6701 /* Optimize the special-case of a zero lower bound.
6703 We convert the low_bound to sizetype to avoid some problems
6704 with constant folding. (E.g. suppose the lower bound is 1,
6705 and its mode is QI. Without the conversion, (ARRAY
6706 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6707 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6709 if (! integer_zerop (low_bound))
6710 index = size_diffop (index, convert (sizetype, low_bound));
6712 /* Fold an expression like: "foo"[2].
6713 This is not done in fold so it won't happen inside &.
6714 Don't fold if this is for wide characters since it's too
6715 difficult to do correctly and this is a very rare case. */
6717 if (TREE_CODE (array) == STRING_CST
6718 && TREE_CODE (index) == INTEGER_CST
6719 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6720 && GET_MODE_CLASS (mode) == MODE_INT
6721 && GET_MODE_SIZE (mode) == 1)
6722 return
6723 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6725 /* If this is a constant index into a constant array,
6726 just get the value from the array. Handle both the cases when
6727 we have an explicit constructor and when our operand is a variable
6728 that was declared const. */
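      /* A hypothetical case this is meant to catch:

	   static const int tbl[3] = { 4, 9, 16 };
	   ... tbl[1] ...

	 When the element for index 1 can be found in the CONSTRUCTOR or in
	 DECL_INITIAL, the reference expands directly to the constant 9
	 instead of a load from memory.  */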
6730 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6731 && TREE_CODE (index) == INTEGER_CST
6732 && 0 > compare_tree_int (index,
6733 list_length (CONSTRUCTOR_ELTS
6734 (TREE_OPERAND (exp, 0)))))
6736 tree elem;
6738 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6739 i = TREE_INT_CST_LOW (index);
6740 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6743 if (elem)
6744 return expand_expr (fold (TREE_VALUE (elem)), target,
6745 tmode, ro_modifier);
6748 else if (optimize >= 1
6749 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6750 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6751 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6753 if (TREE_CODE (index) == INTEGER_CST)
6755 tree init = DECL_INITIAL (array);
6757 if (TREE_CODE (init) == CONSTRUCTOR)
6759 tree elem;
6761 for (elem = CONSTRUCTOR_ELTS (init);
6762 (elem
6763 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6764 elem = TREE_CHAIN (elem))
6767 if (elem && !TREE_SIDE_EFFECTS (elem))
6768 return expand_expr (fold (TREE_VALUE (elem)), target,
6769 tmode, ro_modifier);
6771 else if (TREE_CODE (init) == STRING_CST
6772 && 0 > compare_tree_int (index,
6773 TREE_STRING_LENGTH (init)))
6775 tree type = TREE_TYPE (TREE_TYPE (init));
6776 enum machine_mode mode = TYPE_MODE (type);
6778 if (GET_MODE_CLASS (mode) == MODE_INT
6779 && GET_MODE_SIZE (mode) == 1)
6780 return (GEN_INT
6781 (TREE_STRING_POINTER
6782 (init)[TREE_INT_CST_LOW (index)]));
6787 /* Fall through. */
6789 case COMPONENT_REF:
6790 case BIT_FIELD_REF:
6791 /* If the operand is a CONSTRUCTOR, we can just extract the
6792 appropriate field if it is present. Don't do this if we have
6793 already written the data since we want to refer to that copy
6794 and varasm.c assumes that's what we'll do. */
6795 if (code != ARRAY_REF
6796 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6797 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6799 tree elt;
6801 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6802 elt = TREE_CHAIN (elt))
6803 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6804 /* We can normally use the value of the field in the
6805 CONSTRUCTOR. However, if this is a bitfield in
6806 an integral mode that we can fit in a HOST_WIDE_INT,
6807 we must mask only the number of bits in the bitfield,
6808 since this is done implicitly by the constructor. If
6809 the bitfield does not meet either of those conditions,
6810 we can't do this optimization. */
6811 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6812 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6813 == MODE_INT)
6814 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6815 <= HOST_BITS_PER_WIDE_INT))))
6817 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6818 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6820 HOST_WIDE_INT bitsize
6821 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6823 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6825 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6826 op0 = expand_and (op0, op1, target);
6828 else
6830 enum machine_mode imode
6831 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6832 tree count
6833 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6836 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6837 target, 0);
6838 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6839 target, 0);
6843 return op0;
6848 enum machine_mode mode1;
6849 HOST_WIDE_INT bitsize, bitpos;
6850 tree offset;
6851 int volatilep = 0;
6852 unsigned int alignment;
6853 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6854 &mode1, &unsignedp, &volatilep,
6855 &alignment);
6857 /* If we got back the original object, something is wrong. Perhaps
6858 we are evaluating an expression too early. In any event, don't
6859 infinitely recurse. */
6860 if (tem == exp)
6861 abort ();
6863 /* If TEM's type is a union of variable size, pass TARGET to the inner
6864 computation, since it will need a temporary and TARGET is known
6865 to suffice. This occurs in unchecked conversion in Ada. */
6867 op0 = expand_expr (tem,
6868 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6869 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6870 != INTEGER_CST)
6871 ? target : NULL_RTX),
6872 VOIDmode,
6873 (modifier == EXPAND_INITIALIZER
6874 || modifier == EXPAND_CONST_ADDRESS)
6875 ? modifier : EXPAND_NORMAL);
6877 /* If this is a constant, put it into a register if it is a
6878 legitimate constant and OFFSET is 0; otherwise put it into memory. */
6879 if (CONSTANT_P (op0))
6881 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6882 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6883 && offset == 0)
6884 op0 = force_reg (mode, op0);
6885 else
6886 op0 = validize_mem (force_const_mem (mode, op0));
6889 if (offset != 0)
6891 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6893 /* If this object is in memory, put it into a register.
6894 This case can't occur in C, but can in Ada if we have
6895 unchecked conversion of an expression from a scalar type to
6896 an array or record type. */
6897 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6898 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6900 tree nt = build_qualified_type (TREE_TYPE (tem),
6901 (TYPE_QUALS (TREE_TYPE (tem))
6902 | TYPE_QUAL_CONST));
6903 rtx memloc = assign_temp (nt, 1, 1, 1);
6905 mark_temp_addr_taken (memloc);
6906 emit_move_insn (memloc, op0);
6907 op0 = memloc;
6910 if (GET_CODE (op0) != MEM)
6911 abort ();
6913 if (GET_MODE (offset_rtx) != ptr_mode)
6915 #ifdef POINTERS_EXTEND_UNSIGNED
6916 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6917 #else
6918 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6919 #endif
6922 /* A constant address in OP0 can have VOIDmode; we must not try
6923 to call force_reg in that case, so avoid it. */
6924 if (GET_CODE (op0) == MEM
6925 && GET_MODE (op0) == BLKmode
6926 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6927 && bitsize != 0
6928 && (bitpos % bitsize) == 0
6929 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6930 && alignment == GET_MODE_ALIGNMENT (mode1))
6932 rtx temp = change_address (op0, mode1,
6933 plus_constant (XEXP (op0, 0),
6934 (bitpos /
6935 BITS_PER_UNIT)));
6936 if (GET_CODE (XEXP (temp, 0)) == REG)
6937 op0 = temp;
6938 else
6939 op0 = change_address (op0, mode1,
6940 force_reg (GET_MODE (XEXP (temp, 0)),
6941 XEXP (temp, 0)));
6942 bitpos = 0;
6945 op0 = change_address (op0, VOIDmode,
6946 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6947 force_reg (ptr_mode,
6948 offset_rtx)));
6951 /* Don't forget about volatility even if this is a bitfield. */
6952 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6954 op0 = copy_rtx (op0);
6955 MEM_VOLATILE_P (op0) = 1;
6958 /* Check the access. */
6959 if (cfun != 0 && current_function_check_memory_usage
6960 && GET_CODE (op0) == MEM)
6962 enum memory_use_mode memory_usage;
6963 memory_usage = get_memory_usage_from_modifier (modifier);
6965 if (memory_usage != MEMORY_USE_DONT)
6967 rtx to;
6968 int size;
6970 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6971 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6973 /* Check the access right of the pointer. */
6974 in_check_memory_usage = 1;
6975 if (size > BITS_PER_UNIT)
6976 emit_library_call (chkr_check_addr_libfunc,
6977 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6978 Pmode, GEN_INT (size / BITS_PER_UNIT),
6979 TYPE_MODE (sizetype),
6980 GEN_INT (memory_usage),
6981 TYPE_MODE (integer_type_node));
6982 in_check_memory_usage = 0;
6986 /* In cases where an aligned union has an unaligned object
6987 as a field, we might be extracting a BLKmode value from
6988 an integer-mode (e.g., SImode) object. Handle this case
6989 by doing the extract into an object as wide as the field
6990 (which we know to be the width of a basic mode), then
6991 storing into memory, and changing the mode to BLKmode.
6992 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6993 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6994 if (mode1 == VOIDmode
6995 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6996 || (modifier != EXPAND_CONST_ADDRESS
6997 && modifier != EXPAND_INITIALIZER
6998 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6999 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7000 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7001 /* If the field isn't aligned enough to fetch as a memref,
7002 fetch it as a bit field. */
7003 || (mode1 != BLKmode
7004 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7005 && ((TYPE_ALIGN (TREE_TYPE (tem))
7006 < GET_MODE_ALIGNMENT (mode))
7007 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7008 /* If the type and the field are a constant size and the
7009 size of the type isn't the same size as the bitfield,
7010 we must use bitfield operations. */
7011 || ((bitsize >= 0
7012 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7013 == INTEGER_CST)
7014 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7015 bitsize)))))
7016 || (modifier != EXPAND_CONST_ADDRESS
7017 && modifier != EXPAND_INITIALIZER
7018 && mode == BLKmode
7019 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7020 && (TYPE_ALIGN (type) > alignment
7021 || bitpos % TYPE_ALIGN (type) != 0)))
7023 enum machine_mode ext_mode = mode;
7025 if (ext_mode == BLKmode
7026 && ! (target != 0 && GET_CODE (op0) == MEM
7027 && GET_CODE (target) == MEM
7028 && bitpos % BITS_PER_UNIT == 0))
7029 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7031 if (ext_mode == BLKmode)
7033 /* In this case, BITPOS must start at a byte boundary and
7034 TARGET, if specified, must be a MEM. */
7035 if (GET_CODE (op0) != MEM
7036 || (target != 0 && GET_CODE (target) != MEM)
7037 || bitpos % BITS_PER_UNIT != 0)
7038 abort ();
7040 op0 = change_address (op0, VOIDmode,
7041 plus_constant (XEXP (op0, 0),
7042 bitpos / BITS_PER_UNIT));
7043 if (target == 0)
7044 target = assign_temp (type, 0, 1, 1);
7046 emit_block_move (target, op0,
7047 bitsize == -1 ? expr_size (exp)
7048 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7049 / BITS_PER_UNIT),
7050 BITS_PER_UNIT);
7052 return target;
7055 op0 = validize_mem (op0);
7057 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7058 mark_reg_pointer (XEXP (op0, 0), alignment);
7060 op0 = extract_bit_field (op0, bitsize, bitpos,
7061 unsignedp, target, ext_mode, ext_mode,
7062 alignment,
7063 int_size_in_bytes (TREE_TYPE (tem)));
7065 /* If the result is a record type and BITSIZE is narrower than
7066 the mode of OP0, an integral mode, and this is a big endian
7067 machine, we must put the field into the high-order bits. */
7068 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7069 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7070 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7071 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7072 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7073 - bitsize),
7074 op0, 1);
7076 if (mode == BLKmode)
7078 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7079 TYPE_QUAL_CONST);
7080 rtx new = assign_temp (nt, 0, 1, 1);
7082 emit_move_insn (new, op0);
7083 op0 = copy_rtx (new);
7084 PUT_MODE (op0, BLKmode);
7087 return op0;
7090 /* If the result is BLKmode, use that to access the object
7091 now as well. */
7092 if (mode == BLKmode)
7093 mode1 = BLKmode;
7095 /* Get a reference to just this component. */
7096 if (modifier == EXPAND_CONST_ADDRESS
7097 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7099 rtx new = gen_rtx_MEM (mode1,
7100 plus_constant (XEXP (op0, 0),
7101 (bitpos / BITS_PER_UNIT)));
7103 MEM_COPY_ATTRIBUTES (new, op0);
7104 op0 = new;
7106 else
7107 op0 = change_address (op0, mode1,
7108 plus_constant (XEXP (op0, 0),
7109 (bitpos / BITS_PER_UNIT)));
7111 set_mem_attributes (op0, exp, 0);
7112 if (GET_CODE (XEXP (op0, 0)) == REG)
7113 mark_reg_pointer (XEXP (op0, 0), alignment);
7115 MEM_VOLATILE_P (op0) |= volatilep;
7116 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7117 || modifier == EXPAND_CONST_ADDRESS
7118 || modifier == EXPAND_INITIALIZER)
7119 return op0;
7120 else if (target == 0)
7121 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7123 convert_move (target, op0, unsignedp);
7124 return target;
7127 /* Intended for a reference to a buffer of a file-object in Pascal.
7128 But it's not certain that a special tree code will really be
7129 necessary for these. INDIRECT_REF might work for them. */
7130 case BUFFER_REF:
7131 abort ();
7133 case IN_EXPR:
7135 /* Pascal set IN expression.
7137 Algorithm:
7138 rlo = set_low - (set_low%bits_per_word);
7139 the_word = set [ (index - rlo)/bits_per_word ];
7140 bit_index = index % bits_per_word;
7141 bitmask = 1 << bit_index;
7142 return !!(the_word & bitmask); */
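	/* Worked through with hypothetical values: for set_low = 3,
	   index = 13 and 8-bit words, rlo = 3 - (3 % 8) = 0, the_word is
	   set[(13 - 0) / 8] = set[1], bit_index = 13 % 8 = 5, and the
	   result is bit 5 of that byte.  */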
7144 tree set = TREE_OPERAND (exp, 0);
7145 tree index = TREE_OPERAND (exp, 1);
7146 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7147 tree set_type = TREE_TYPE (set);
7148 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7149 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7150 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7151 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7152 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7153 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7154 rtx setaddr = XEXP (setval, 0);
7155 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7156 rtx rlow;
7157 rtx diff, quo, rem, addr, bit, result;
7159 /* If domain is empty, answer is no. Likewise if index is constant
7160 and out of bounds. */
7161 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7162 && TREE_CODE (set_low_bound) == INTEGER_CST
7163 && tree_int_cst_lt (set_high_bound, set_low_bound))
7164 || (TREE_CODE (index) == INTEGER_CST
7165 && TREE_CODE (set_low_bound) == INTEGER_CST
7166 && tree_int_cst_lt (index, set_low_bound))
7167 || (TREE_CODE (set_high_bound) == INTEGER_CST
7168 && TREE_CODE (index) == INTEGER_CST
7169 && tree_int_cst_lt (set_high_bound, index))))
7170 return const0_rtx;
7172 if (target == 0)
7173 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7175 /* If we get here, we have to generate the code for both cases
7176 (in range and out of range). */
7178 op0 = gen_label_rtx ();
7179 op1 = gen_label_rtx ();
7181 if (! (GET_CODE (index_val) == CONST_INT
7182 && GET_CODE (lo_r) == CONST_INT))
7184 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7185 GET_MODE (index_val), iunsignedp, 0, op1);
7188 if (! (GET_CODE (index_val) == CONST_INT
7189 && GET_CODE (hi_r) == CONST_INT))
7191 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7192 GET_MODE (index_val), iunsignedp, 0, op1);
7195 /* Calculate the element number of bit zero in the first word
7196 of the set. */
7197 if (GET_CODE (lo_r) == CONST_INT)
7198 rlow = GEN_INT (INTVAL (lo_r)
7199 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7200 else
7201 rlow = expand_binop (index_mode, and_optab, lo_r,
7202 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7203 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7205 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7206 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7208 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7209 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7210 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7211 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7213 addr = memory_address (byte_mode,
7214 expand_binop (index_mode, add_optab, diff,
7215 setaddr, NULL_RTX, iunsignedp,
7216 OPTAB_LIB_WIDEN));
7218 /* Extract the bit we want to examine. */
7219 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7220 gen_rtx_MEM (byte_mode, addr),
7221 make_tree (TREE_TYPE (index), rem),
7222 NULL_RTX, 1);
7223 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7224 GET_MODE (target) == byte_mode ? target : 0,
7225 1, OPTAB_LIB_WIDEN);
7227 if (result != target)
7228 convert_move (target, result, 1);
7230 /* Output the code to handle the out-of-range case. */
7231 emit_jump (op0);
7232 emit_label (op1);
7233 emit_move_insn (target, const0_rtx);
7234 emit_label (op0);
7235 return target;
7238 case WITH_CLEANUP_EXPR:
7239 if (RTL_EXPR_RTL (exp) == 0)
7241 RTL_EXPR_RTL (exp)
7242 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7243 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7245 /* That's it for this cleanup. */
7246 TREE_OPERAND (exp, 2) = 0;
7248 return RTL_EXPR_RTL (exp);
7250 case CLEANUP_POINT_EXPR:
7252 /* Start a new binding layer that will keep track of all cleanup
7253 actions to be performed. */
7254 expand_start_bindings (2);
7256 target_temp_slot_level = temp_slot_level;
7258 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7259 /* If we're going to use this value, load it up now. */
7260 if (! ignore)
7261 op0 = force_not_mem (op0);
7262 preserve_temp_slots (op0);
7263 expand_end_bindings (NULL_TREE, 0, 0);
7265 return op0;
7267 case CALL_EXPR:
7268 /* Check for a built-in function. */
7269 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7270 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7271 == FUNCTION_DECL)
7272 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7274 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7275 == BUILT_IN_FRONTEND)
7276 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7277 else
7278 return expand_builtin (exp, target, subtarget, tmode, ignore);
7281 return expand_call (exp, target, ignore);
7283 case NON_LVALUE_EXPR:
7284 case NOP_EXPR:
7285 case CONVERT_EXPR:
7286 case REFERENCE_EXPR:
7287 if (TREE_OPERAND (exp, 0) == error_mark_node)
7288 return const0_rtx;
7290 if (TREE_CODE (type) == UNION_TYPE)
7292 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7294 /* If both input and output are BLKmode, this conversion
7295 isn't actually doing anything unless we need to make the
7296 alignment stricter. */
7297 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7298 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7299 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7300 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7301 modifier);
7303 if (target == 0)
7304 target = assign_temp (type, 0, 1, 1);
7306 if (GET_CODE (target) == MEM)
7307 /* Store data into beginning of memory target. */
7308 store_expr (TREE_OPERAND (exp, 0),
7309 change_address (target, TYPE_MODE (valtype), 0), 0);
7311 else if (GET_CODE (target) == REG)
7312 /* Store this field into a union of the proper type. */
7313 store_field (target,
7314 MIN ((int_size_in_bytes (TREE_TYPE
7315 (TREE_OPERAND (exp, 0)))
7316 * BITS_PER_UNIT),
7317 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7318 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7319 VOIDmode, 0, BITS_PER_UNIT,
7320 int_size_in_bytes (type), 0);
7321 else
7322 abort ();
7324 /* Return the entire union. */
7325 return target;
7328 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7330 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7331 ro_modifier);
7333 /* If the signedness of the conversion differs and OP0 is
7334 a promoted SUBREG, clear that indication since we now
7335 have to do the proper extension. */
7336 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7337 && GET_CODE (op0) == SUBREG)
7338 SUBREG_PROMOTED_VAR_P (op0) = 0;
7340 return op0;
7343 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7344 if (GET_MODE (op0) == mode)
7345 return op0;
7347 /* If OP0 is a constant, just convert it into the proper mode. */
7348 if (CONSTANT_P (op0))
7349 return
7350 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7351 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7353 if (modifier == EXPAND_INITIALIZER)
7354 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7356 if (target == 0)
7357 return
7358 convert_to_mode (mode, op0,
7359 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7360 else
7361 convert_move (target, op0,
7362 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7363 return target;
7365 case PLUS_EXPR:
7366 /* We come here from MINUS_EXPR when the second operand is a
7367 constant. */
7368 plus_expr:
7369 this_optab = ! unsignedp && flag_trapv
7370 && (GET_MODE_CLASS(mode) == MODE_INT)
7371 ? addv_optab : add_optab;
7373 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7374 something else, make sure we add the register to the constant and
7375 then to the other thing. This case can occur during strength
7376 reduction and doing it this way will produce better code if the
7377 frame pointer or argument pointer is eliminated.
7379 fold-const.c will ensure that the constant is always in the inner
7380 PLUS_EXPR, so the only case we need to do anything about is if
7381 sp, ap, or fp is our second argument, in which case we must swap
7382 the innermost first argument and our second argument. */
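/* Illustrative sketch (not drawn from a real test case): given the tree
   ((n + 12) + FP), where FP is an RTL_EXPR holding the frame pointer,
   the swap below turns it into ((FP + 12) + n), so that once FP is
   eliminated into sp+offset the two constants fold into a single
   displacement.  */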
7384 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7385 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7386 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7387 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7388 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7389 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7391 tree t = TREE_OPERAND (exp, 1);
7393 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7394 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7397 /* If the result is to be ptr_mode and we are adding an integer to
7398 something, we might be forming a constant. So try to use
7399 plus_constant. If it produces a sum and we can't accept it,
7400 use force_operand. This allows P = &ARR[const] to generate
7401 efficient code on machines where a SYMBOL_REF is not a valid
7402 address.
7404 If this is an EXPAND_SUM call, always return the sum. */
7405 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7406 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7408 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7409 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7410 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7412 rtx constant_part;
7414 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7415 EXPAND_SUM);
7416 /* Use immed_double_const to ensure that the constant is
7417 truncated according to the mode of OP1, then sign extended
7418 to a HOST_WIDE_INT. Using the constant directly can result
7419 in non-canonical RTL in a 64x32 cross compile. */
7420 constant_part
7421 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7422 (HOST_WIDE_INT) 0,
7423 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7424 op1 = plus_constant (op1, INTVAL (constant_part));
7425 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7426 op1 = force_operand (op1, target);
7427 return op1;
7430 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7431 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7432 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7434 rtx constant_part;
7436 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7437 EXPAND_SUM);
7438 if (! CONSTANT_P (op0))
7440 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7441 VOIDmode, modifier);
7442 /* Don't go to both_summands if modifier
7443 says it's not right to return a PLUS. */
7444 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7445 goto binop2;
7446 goto both_summands;
7448 /* Use immed_double_const to ensure that the constant is
7449 truncated according to the mode of OP0, then sign extended
7450 to a HOST_WIDE_INT. Using the constant directly can result
7451 in non-canonical RTL in a 64x32 cross compile. */
7452 constant_part
7453 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7454 (HOST_WIDE_INT) 0,
7455 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7456 op0 = plus_constant (op0, INTVAL (constant_part));
7457 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7458 op0 = force_operand (op0, target);
7459 return op0;
7463 /* No sense saving up arithmetic to be done
7464 if it's all in the wrong mode to form part of an address.
7465 And force_operand won't know whether to sign-extend or
7466 zero-extend. */
7467 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7468 || mode != ptr_mode)
7469 goto binop;
7471 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7472 subtarget = 0;
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7475 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7477 both_summands:
7478 /* Make sure any term that's a sum with a constant comes last. */
7479 if (GET_CODE (op0) == PLUS
7480 && CONSTANT_P (XEXP (op0, 1)))
7482 temp = op0;
7483 op0 = op1;
7484 op1 = temp;
7486 /* If adding to a sum including a constant,
7487 associate it to put the constant outside. */
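/* For example, if OP0 is (reg:SI 60) and OP1 is
   (plus:SI (reg:SI 61) (const_int 8)), the code below is expected to
   produce (plus:SI (plus:SI (reg:SI 60) (reg:SI 61)) (const_int 8)),
   keeping the constant outermost where an address can absorb it.  */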
7488 if (GET_CODE (op1) == PLUS
7489 && CONSTANT_P (XEXP (op1, 1)))
7491 rtx constant_term = const0_rtx;
7493 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7494 if (temp != 0)
7495 op0 = temp;
7496 /* Ensure that MULT comes first if there is one. */
7497 else if (GET_CODE (op0) == MULT)
7498 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7499 else
7500 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7502 /* Let's also eliminate constants from op0 if possible. */
7503 op0 = eliminate_constant_term (op0, &constant_term);
7505 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7506 their sum should be a constant. Form it into OP1, since the
7507 result we want will then be OP0 + OP1. */
7509 temp = simplify_binary_operation (PLUS, mode, constant_term,
7510 XEXP (op1, 1));
7511 if (temp != 0)
7512 op1 = temp;
7513 else
7514 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7517 /* Put a constant term last and put a multiplication first. */
7518 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7519 temp = op1, op1 = op0, op0 = temp;
7521 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7522 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7524 case MINUS_EXPR:
7525 /* For initializers, we are allowed to return a MINUS of two
7526 symbolic constants. Here we handle all cases when both operands
7527 are constant. */
7528 /* Handle difference of two symbolic constants,
7529 for the sake of an initializer. */
7530 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7531 && really_constant_p (TREE_OPERAND (exp, 0))
7532 && really_constant_p (TREE_OPERAND (exp, 1)))
7534 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7535 VOIDmode, ro_modifier);
7536 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7537 VOIDmode, ro_modifier);
7539 /* If the last operand is a CONST_INT, use plus_constant of
7540 the negated constant. Else make the MINUS. */
7541 if (GET_CODE (op1) == CONST_INT)
7542 return plus_constant (op0, - INTVAL (op1));
7543 else
7544 return gen_rtx_MINUS (mode, op0, op1);
7546 /* Convert A - const to A + (-const). */
7547 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7549 tree negated = fold (build1 (NEGATE_EXPR, type,
7550 TREE_OPERAND (exp, 1)));
7552 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7553 /* If we can't negate the constant in TYPE, leave it alone and
7554 expand_binop will negate it for us. We used to try to do it
7555 here in the signed version of TYPE, but that doesn't work
7556 on POINTER_TYPEs. */;
7557 else
7559 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7560 goto plus_expr;
7563 this_optab = ! unsignedp && flag_trapv
7564 && (GET_MODE_CLASS(mode) == MODE_INT)
7565 ? subv_optab : sub_optab;
7566 goto binop;
7568 case MULT_EXPR:
7569 /* If first operand is constant, swap them.
7570 Thus the following special case checks need only
7571 check the second operand. */
7572 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7574 register tree t1 = TREE_OPERAND (exp, 0);
7575 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7576 TREE_OPERAND (exp, 1) = t1;
7579 /* Attempt to return something suitable for generating an
7580 indexed address, for machines that support that. */
7582 if (modifier == EXPAND_SUM && mode == ptr_mode
7583 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7584 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7586 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7587 EXPAND_SUM);
7589 /* Apply distributive law if OP0 is x+c. */
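/* Sketch: when expanding something like &a[i + 4] with 8-byte elements,
   OP0 may arrive as (plus (reg i) (const_int 4)); the product
   (i + 4) * 8 is then returned as
   (plus (mult (reg i) (const_int 8)) (const_int 32)),
   which the caller can fold directly into an address.  */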
7590 if (GET_CODE (op0) == PLUS
7591 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7592 return
7593 gen_rtx_PLUS
7594 (mode,
7595 gen_rtx_MULT
7596 (mode, XEXP (op0, 0),
7597 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7598 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7599 * INTVAL (XEXP (op0, 1))));
7601 if (GET_CODE (op0) != REG)
7602 op0 = force_operand (op0, NULL_RTX);
7603 if (GET_CODE (op0) != REG)
7604 op0 = copy_to_mode_reg (mode, op0);
7606 return
7607 gen_rtx_MULT (mode, op0,
7608 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7611 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7612 subtarget = 0;
7614 /* Check for multiplying things that have been extended
7615 from a narrower type. If this machine supports multiplying
7616 in that narrower type with a result in the desired type,
7617 do it that way, and avoid the explicit type-conversion. */
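/* For instance, on a target that provides a widening multiply pattern
   (something like mulhisi3), the source expression (int) s1 * (int) s2,
   with s1 and s2 of type short, is assumed to be emitted as a single
   HImode-to-SImode multiply instead of two sign extensions followed by
   a full SImode multiply.  */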
7618 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7619 && TREE_CODE (type) == INTEGER_TYPE
7620 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7621 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7622 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7623 && int_fits_type_p (TREE_OPERAND (exp, 1),
7624 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7625 /* Don't use a widening multiply if a shift will do. */
7626 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7627 > HOST_BITS_PER_WIDE_INT)
7628 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7629 ||
7630 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7631 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7632 ==
7633 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7634 /* If both operands are extended, they must either both
7635 be zero-extended or both be sign-extended. */
7636 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7637 ==
7638 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7640 enum machine_mode innermode
7641 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7642 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7643 ? smul_widen_optab : umul_widen_optab);
7644 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7645 ? umul_widen_optab : smul_widen_optab);
7646 if (mode == GET_MODE_WIDER_MODE (innermode))
7648 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7650 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7651 NULL_RTX, VOIDmode, 0);
7652 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7653 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7654 VOIDmode, 0);
7655 else
7656 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7657 NULL_RTX, VOIDmode, 0);
7658 goto binop2;
7660 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7661 && innermode == word_mode)
7663 rtx htem;
7664 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7665 NULL_RTX, VOIDmode, 0);
7666 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7667 op1 = convert_modes (innermode, mode,
7668 expand_expr (TREE_OPERAND (exp, 1),
7669 NULL_RTX, VOIDmode, 0),
7670 unsignedp);
7671 else
7672 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7673 NULL_RTX, VOIDmode, 0);
7674 temp = expand_binop (mode, other_optab, op0, op1, target,
7675 unsignedp, OPTAB_LIB_WIDEN);
7676 htem = expand_mult_highpart_adjust (innermode,
7677 gen_highpart (innermode, temp),
7678 op0, op1,
7679 gen_highpart (innermode, temp),
7680 unsignedp);
7681 emit_move_insn (gen_highpart (innermode, temp), htem);
7682 return temp;
7686 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7687 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7688 return expand_mult (mode, op0, op1, target, unsignedp);
7690 case TRUNC_DIV_EXPR:
7691 case FLOOR_DIV_EXPR:
7692 case CEIL_DIV_EXPR:
7693 case ROUND_DIV_EXPR:
7694 case EXACT_DIV_EXPR:
7695 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7696 subtarget = 0;
7697 /* Possible optimization: compute the dividend with EXPAND_SUM
7698 then if the divisor is constant can optimize the case
7699 where some terms of the dividend have coeffs divisible by it. */
7700 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7701 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7702 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7704 case RDIV_EXPR:
7705 this_optab = flodiv_optab;
7706 goto binop;
7708 case TRUNC_MOD_EXPR:
7709 case FLOOR_MOD_EXPR:
7710 case CEIL_MOD_EXPR:
7711 case ROUND_MOD_EXPR:
7712 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7713 subtarget = 0;
7714 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7715 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7716 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7718 case FIX_ROUND_EXPR:
7719 case FIX_FLOOR_EXPR:
7720 case FIX_CEIL_EXPR:
7721 abort (); /* Not used for C. */
7723 case FIX_TRUNC_EXPR:
7724 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7725 if (target == 0)
7726 target = gen_reg_rtx (mode);
7727 expand_fix (target, op0, unsignedp);
7728 return target;
7730 case FLOAT_EXPR:
7731 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7732 if (target == 0)
7733 target = gen_reg_rtx (mode);
7734 /* expand_float can't figure out what to do if FROM has VOIDmode.
7735 So give it the correct mode. With -O, cse will optimize this. */
7736 if (GET_MODE (op0) == VOIDmode)
7737 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7738 op0);
7739 expand_float (target, op0,
7740 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7741 return target;
7743 case NEGATE_EXPR:
7744 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7745 temp = expand_unop (mode,
7746 ! unsignedp && flag_trapv
7747 && (GET_MODE_CLASS(mode) == MODE_INT)
7748 ? negv_optab : neg_optab, op0, target, 0);
7749 if (temp == 0)
7750 abort ();
7751 return temp;
7753 case ABS_EXPR:
7754 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7756 /* Handle complex values specially. */
7757 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7758 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7759 return expand_complex_abs (mode, op0, target, unsignedp);
7761 /* Unsigned abs is simply the operand. Testing here means we don't
7762 risk generating incorrect code below. */
7763 if (TREE_UNSIGNED (type))
7764 return op0;
7766 return expand_abs (mode, op0, target, unsignedp,
7767 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7769 case MAX_EXPR:
7770 case MIN_EXPR:
7771 target = original_target;
7772 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7773 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7774 || GET_MODE (target) != mode
7775 || (GET_CODE (target) == REG
7776 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7777 target = gen_reg_rtx (mode);
7778 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7779 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7781 /* First try to do it with a special MIN or MAX instruction.
7782 If that does not win, use a conditional jump to select the proper
7783 value. */
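/* When no such instruction exists, the fallback below is roughly
   (for MAX_EXPR; MIN_EXPR uses LE instead of GE):

       target = a;
       if (target >= b) goto done;
       target = b;
     done:
*/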
7784 this_optab = (TREE_UNSIGNED (type)
7785 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7786 : (code == MIN_EXPR ? smin_optab : smax_optab));
7788 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7789 OPTAB_WIDEN);
7790 if (temp != 0)
7791 return temp;
7793 /* At this point, a MEM target is no longer useful; we will get better
7794 code without it. */
7796 if (GET_CODE (target) == MEM)
7797 target = gen_reg_rtx (mode);
7799 if (target != op0)
7800 emit_move_insn (target, op0);
7802 op0 = gen_label_rtx ();
7804 /* If this mode is an integer too wide to compare properly,
7805 compare word by word. Rely on cse to optimize constant cases. */
7806 if (GET_MODE_CLASS (mode) == MODE_INT
7807 && ! can_compare_p (GE, mode, ccp_jump))
7809 if (code == MAX_EXPR)
7810 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7811 target, op1, NULL_RTX, op0);
7812 else
7813 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7814 op1, target, NULL_RTX, op0);
7816 else
7818 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7819 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7820 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7821 op0);
7823 emit_move_insn (target, op1);
7824 emit_label (op0);
7825 return target;
7827 case BIT_NOT_EXPR:
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7829 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7830 if (temp == 0)
7831 abort ();
7832 return temp;
7834 case FFS_EXPR:
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7836 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7837 if (temp == 0)
7838 abort ();
7839 return temp;
7841 /* ??? Can optimize bitwise operations with one arg constant.
7842 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7843 and (a bitwise1 b) bitwise2 b (etc)
7844 but that is probably not worth while. */
7846 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7847 boolean values when we want in all cases to compute both of them. In
7848 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7849 as actual zero-or-1 values and then bitwise anding. In cases where
7850 there cannot be any side effects, better code would be made by
7851 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7852 how to recognize those cases. */
7854 case TRUTH_AND_EXPR:
7855 case BIT_AND_EXPR:
7856 this_optab = and_optab;
7857 goto binop;
7859 case TRUTH_OR_EXPR:
7860 case BIT_IOR_EXPR:
7861 this_optab = ior_optab;
7862 goto binop;
7864 case TRUTH_XOR_EXPR:
7865 case BIT_XOR_EXPR:
7866 this_optab = xor_optab;
7867 goto binop;
7869 case LSHIFT_EXPR:
7870 case RSHIFT_EXPR:
7871 case LROTATE_EXPR:
7872 case RROTATE_EXPR:
7873 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7874 subtarget = 0;
7875 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7876 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7877 unsignedp);
7879 /* Could determine the answer when only additive constants differ. Also,
7880 the addition of one can be handled by changing the condition. */
7881 case LT_EXPR:
7882 case LE_EXPR:
7883 case GT_EXPR:
7884 case GE_EXPR:
7885 case EQ_EXPR:
7886 case NE_EXPR:
7887 case UNORDERED_EXPR:
7888 case ORDERED_EXPR:
7889 case UNLT_EXPR:
7890 case UNLE_EXPR:
7891 case UNGT_EXPR:
7892 case UNGE_EXPR:
7893 case UNEQ_EXPR:
7894 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7895 if (temp != 0)
7896 return temp;
7898 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
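/* That is, roughly:  temp = foo; if (temp == 0) goto L; temp = 1; L:
   which is correct because when FOO is zero TEMP already holds the
   desired result 0.  */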
7899 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7900 && original_target
7901 && GET_CODE (original_target) == REG
7902 && (GET_MODE (original_target)
7903 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7905 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7906 VOIDmode, 0);
7908 if (temp != original_target)
7909 temp = copy_to_reg (temp);
7911 op1 = gen_label_rtx ();
7912 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7913 GET_MODE (temp), unsignedp, 0, op1);
7914 emit_move_insn (temp, const1_rtx);
7915 emit_label (op1);
7916 return temp;
7919 /* If no set-flag instruction, must generate a conditional
7920 store into a temporary variable. Drop through
7921 and handle this like && and ||. */
7923 case TRUTH_ANDIF_EXPR:
7924 case TRUTH_ORIF_EXPR:
7925 if (! ignore
7926 && (target == 0 || ! safe_from_p (target, exp, 1)
7927 /* Make sure we don't have a hard reg (such as function's return
7928 value) live across basic blocks, if not optimizing. */
7929 || (!optimize && GET_CODE (target) == REG
7930 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7931 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7933 if (target)
7934 emit_clr_insn (target);
7936 op1 = gen_label_rtx ();
7937 jumpifnot (exp, op1);
7939 if (target)
7940 emit_0_to_1_insn (target);
7942 emit_label (op1);
7943 return ignore ? const0_rtx : target;
7945 case TRUTH_NOT_EXPR:
7946 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7947 /* The parser is careful to generate TRUTH_NOT_EXPR
7948 only with operands that are always zero or one. */
7949 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7950 target, 1, OPTAB_LIB_WIDEN);
7951 if (temp == 0)
7952 abort ();
7953 return temp;
7955 case COMPOUND_EXPR:
7956 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7957 emit_queue ();
7958 return expand_expr (TREE_OPERAND (exp, 1),
7959 (ignore ? const0_rtx : target),
7960 VOIDmode, 0);
7962 case COND_EXPR:
7963 /* If we would have a "singleton" (see below) were it not for a
7964 conversion in each arm, bring that conversion back out. */
7965 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7966 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7967 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7968 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7970 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7971 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7973 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7974 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7975 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7976 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7977 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7978 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7979 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7980 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7981 return expand_expr (build1 (NOP_EXPR, type,
7982 build (COND_EXPR, TREE_TYPE (iftrue),
7983 TREE_OPERAND (exp, 0),
7984 iftrue, iffalse)),
7985 target, tmode, modifier);
7989 /* Note that COND_EXPRs whose type is a structure or union
7990 are required to be constructed to contain assignments of
7991 a temporary variable, so that we can evaluate them here
7992 for side effect only. If type is void, we must do likewise. */
7994 /* If an arm of the branch requires a cleanup,
7995 only that cleanup is performed. */
7997 tree singleton = 0;
7998 tree binary_op = 0, unary_op = 0;
8000 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8001 convert it to our mode, if necessary. */
8002 if (integer_onep (TREE_OPERAND (exp, 1))
8003 && integer_zerop (TREE_OPERAND (exp, 2))
8004 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8006 if (ignore)
8008 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8009 ro_modifier);
8010 return const0_rtx;
8013 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8014 if (GET_MODE (op0) == mode)
8015 return op0;
8017 if (target == 0)
8018 target = gen_reg_rtx (mode);
8019 convert_move (target, op0, unsignedp);
8020 return target;
8023 /* Check for X ? A + B : A. If we have this, we can copy A to the
8024 output and conditionally add B. Similarly for unary operations.
8025 Don't do this if X has side-effects because those side effects
8026 might affect A or B and the "?" operation is a sequence point in
8027 ANSI. (operand_equal_p tests for side effects.) */
8029 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8030 && operand_equal_p (TREE_OPERAND (exp, 2),
8031 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8032 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8033 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8034 && operand_equal_p (TREE_OPERAND (exp, 1),
8035 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8036 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8037 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8038 && operand_equal_p (TREE_OPERAND (exp, 2),
8039 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8040 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8041 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8042 && operand_equal_p (TREE_OPERAND (exp, 1),
8043 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8044 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8046 /* If we are not to produce a result, we have no target. Otherwise,
8047 if a target was specified use it; it will not be used as an
8048 intermediate target unless it is safe. If no target, use a
8049 temporary. */
8051 if (ignore)
8052 temp = 0;
8053 else if (original_target
8054 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8055 || (singleton && GET_CODE (original_target) == REG
8056 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8057 && original_target == var_rtx (singleton)))
8058 && GET_MODE (original_target) == mode
8059 #ifdef HAVE_conditional_move
8060 && (! can_conditionally_move_p (mode)
8061 || GET_CODE (original_target) == REG
8062 || TREE_ADDRESSABLE (type))
8063 #endif
8064 && ! (GET_CODE (original_target) == MEM
8065 && MEM_VOLATILE_P (original_target)))
8066 temp = original_target;
8067 else if (TREE_ADDRESSABLE (type))
8068 abort ();
8069 else
8070 temp = assign_temp (type, 0, 0, 1);
8072 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8073 do the test of X as a store-flag operation, do this as
8074 A + ((X != 0) << log C). Similarly for other simple binary
8075 operators. Only do for C == 1 if BRANCH_COST is low. */
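/* E.g., x ? a + 4 : a is assumed to become a + ((x != 0) << 2) when a
   store-flag instruction is available, trading the branch for a shift
   and an add.  */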
8076 if (temp && singleton && binary_op
8077 && (TREE_CODE (binary_op) == PLUS_EXPR
8078 || TREE_CODE (binary_op) == MINUS_EXPR
8079 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8080 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8081 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8082 : integer_onep (TREE_OPERAND (binary_op, 1)))
8083 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8085 rtx result;
8086 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8087 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8088 ? addv_optab : add_optab)
8089 : TREE_CODE (binary_op) == MINUS_EXPR
8090 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8091 ? subv_optab : sub_optab)
8092 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8093 : xor_optab);
8095 /* If we had X ? A : A + 1, do this as A + (X == 0).
8097 We have to invert the truth value here and then put it
8098 back later if do_store_flag fails. We cannot simply copy
8099 TREE_OPERAND (exp, 0) to another variable and modify that
8100 because invert_truthvalue can modify the tree pointed to
8101 by its argument. */
8102 if (singleton == TREE_OPERAND (exp, 1))
8103 TREE_OPERAND (exp, 0)
8104 = invert_truthvalue (TREE_OPERAND (exp, 0));
8106 result = do_store_flag (TREE_OPERAND (exp, 0),
8107 (safe_from_p (temp, singleton, 1)
8108 ? temp : NULL_RTX),
8109 mode, BRANCH_COST <= 1);
8111 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8112 result = expand_shift (LSHIFT_EXPR, mode, result,
8113 build_int_2 (tree_log2
8114 (TREE_OPERAND
8115 (binary_op, 1)),
8116 0),
8117 (safe_from_p (temp, singleton, 1)
8118 ? temp : NULL_RTX), 0);
8120 if (result)
8122 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8123 return expand_binop (mode, boptab, op1, result, temp,
8124 unsignedp, OPTAB_LIB_WIDEN);
8126 else if (singleton == TREE_OPERAND (exp, 1))
8127 TREE_OPERAND (exp, 0)
8128 = invert_truthvalue (TREE_OPERAND (exp, 0));
8131 do_pending_stack_adjust ();
8132 NO_DEFER_POP;
8133 op0 = gen_label_rtx ();
8135 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8137 if (temp != 0)
8139 /* If the target conflicts with the other operand of the
8140 binary op, we can't use it. Also, we can't use the target
8141 if it is a hard register, because evaluating the condition
8142 might clobber it. */
8143 if ((binary_op
8144 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8145 || (GET_CODE (temp) == REG
8146 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8147 temp = gen_reg_rtx (mode);
8148 store_expr (singleton, temp, 0);
8150 else
8151 expand_expr (singleton,
8152 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8153 if (singleton == TREE_OPERAND (exp, 1))
8154 jumpif (TREE_OPERAND (exp, 0), op0);
8155 else
8156 jumpifnot (TREE_OPERAND (exp, 0), op0);
8158 start_cleanup_deferral ();
8159 if (binary_op && temp == 0)
8160 /* Just touch the other operand. */
8161 expand_expr (TREE_OPERAND (binary_op, 1),
8162 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8163 else if (binary_op)
8164 store_expr (build (TREE_CODE (binary_op), type,
8165 make_tree (type, temp),
8166 TREE_OPERAND (binary_op, 1)),
8167 temp, 0);
8168 else
8169 store_expr (build1 (TREE_CODE (unary_op), type,
8170 make_tree (type, temp)),
8171 temp, 0);
8172 op1 = op0;
8174 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8175 comparison operator. If we have one of these cases, set the
8176 output to A, branch on A (cse will merge these two references),
8177 then set the output to FOO. */
8178 else if (temp
8179 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8180 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8181 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8182 TREE_OPERAND (exp, 1), 0)
8183 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8184 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8185 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8187 if (GET_CODE (temp) == REG
8188 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8189 temp = gen_reg_rtx (mode);
8190 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8191 jumpif (TREE_OPERAND (exp, 0), op0);
8193 start_cleanup_deferral ();
8194 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8195 op1 = op0;
8197 else if (temp
8198 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8199 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8200 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8201 TREE_OPERAND (exp, 2), 0)
8202 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8203 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8204 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8206 if (GET_CODE (temp) == REG
8207 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8208 temp = gen_reg_rtx (mode);
8209 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8210 jumpifnot (TREE_OPERAND (exp, 0), op0);
8212 start_cleanup_deferral ();
8213 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8214 op1 = op0;
8216 else
8218 op1 = gen_label_rtx ();
8219 jumpifnot (TREE_OPERAND (exp, 0), op0);
8221 start_cleanup_deferral ();
8223 /* One branch of the cond can be void, if it never returns. For
8224 example, A ? throw : E.  */
8225 if (temp != 0
8226 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8227 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8228 else
8229 expand_expr (TREE_OPERAND (exp, 1),
8230 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8231 end_cleanup_deferral ();
8232 emit_queue ();
8233 emit_jump_insn (gen_jump (op1));
8234 emit_barrier ();
8235 emit_label (op0);
8236 start_cleanup_deferral ();
8237 if (temp != 0
8238 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8239 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8240 else
8241 expand_expr (TREE_OPERAND (exp, 2),
8242 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8245 end_cleanup_deferral ();
8247 emit_queue ();
8248 emit_label (op1);
8249 OK_DEFER_POP;
8251 return temp;
8254 case TARGET_EXPR:
8256 /* Something needs to be initialized, but we didn't know
8257 where that thing was when building the tree. For example,
8258 it could be the return value of a function, or a parameter
8259 to a function which is passed on the stack, or a temporary
8260 variable which must be passed by reference.
8262 We guarantee that the expression will either be constructed
8263 or copied into our original target. */
8265 tree slot = TREE_OPERAND (exp, 0);
8266 tree cleanups = NULL_TREE;
8267 tree exp1;
8269 if (TREE_CODE (slot) != VAR_DECL)
8270 abort ();
8272 if (! ignore)
8273 target = original_target;
8275 /* Set this here so that if we get a target that refers to a
8276 register variable that's already been used, put_reg_into_stack
8277 knows that it should fix up those uses. */
8278 TREE_USED (slot) = 1;
8280 if (target == 0)
8282 if (DECL_RTL_SET_P (slot))
8284 target = DECL_RTL (slot);
8285 /* If we have already expanded the slot, don't do
8286 it again. (mrs) */
8287 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8288 return target;
8290 else
8292 target = assign_temp (type, 2, 0, 1);
8293 /* All temp slots at this level must not conflict. */
8294 preserve_temp_slots (target);
8295 SET_DECL_RTL (slot, target);
8296 if (TREE_ADDRESSABLE (slot))
8297 put_var_into_stack (slot);
8299 /* Since SLOT is not known to the called function
8300 to belong to its stack frame, we must build an explicit
8301 cleanup. This case occurs when we must build up a reference
8302 to pass the reference as an argument. In this case,
8303 it is very likely that such a reference need not be
8304 built here. */
8306 if (TREE_OPERAND (exp, 2) == 0)
8307 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8308 cleanups = TREE_OPERAND (exp, 2);
8311 else
8313 /* This case does occur, when expanding a parameter which
8314 needs to be constructed on the stack. The target
8315 is the actual stack address that we want to initialize.
8316 The function we call will perform the cleanup in this case. */
8318 /* If we have already assigned it space, use that space,
8319 not the target we were passed in, as our target
8320 parameter is only a hint. */
8321 if (DECL_RTL_SET_P (slot))
8323 target = DECL_RTL (slot);
8324 /* If we have already expanded the slot, don't do
8325 it again. (mrs) */
8326 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8327 return target;
8329 else
8331 SET_DECL_RTL (slot, target);
8332 /* If we must have an addressable slot, then make sure that
8333 the RTL that we just stored in slot is OK. */
8334 if (TREE_ADDRESSABLE (slot))
8335 put_var_into_stack (slot);
8339 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8340 /* Mark it as expanded. */
8341 TREE_OPERAND (exp, 1) = NULL_TREE;
8343 store_expr (exp1, target, 0);
8345 expand_decl_cleanup (NULL_TREE, cleanups);
8347 return target;
8350 case INIT_EXPR:
8352 tree lhs = TREE_OPERAND (exp, 0);
8353 tree rhs = TREE_OPERAND (exp, 1);
8354 tree noncopied_parts = 0;
8355 tree lhs_type = TREE_TYPE (lhs);
8357 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8358 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8359 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8360 TYPE_NONCOPIED_PARTS (lhs_type));
8361 while (noncopied_parts != 0)
8363 expand_assignment (TREE_VALUE (noncopied_parts),
8364 TREE_PURPOSE (noncopied_parts), 0, 0);
8365 noncopied_parts = TREE_CHAIN (noncopied_parts);
8367 return temp;
8370 case MODIFY_EXPR:
8372 /* If lhs is complex, expand calls in rhs before computing it.
8373 That's so we don't compute a pointer and save it over a call.
8374 If lhs is simple, compute it first so we can give it as a
8375 target if the rhs is just a call. This avoids an extra temp and copy
8376 and that prevents a partial-subsumption which makes bad code.
8377 Actually we could treat component_ref's of vars like vars. */
8379 tree lhs = TREE_OPERAND (exp, 0);
8380 tree rhs = TREE_OPERAND (exp, 1);
8381 tree noncopied_parts = 0;
8382 tree lhs_type = TREE_TYPE (lhs);
8384 temp = 0;
8386 /* Check for |= or &= of a bitfield of size one into another bitfield
8387 of size 1. In this case, (unless we need the result of the
8388 assignment) we can do this more efficiently with a
8389 test followed by an assignment, if necessary.
8391 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8392 things change so we do, this code should be enhanced to
8393 support it. */
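/* Sketch: with one-bit bitfields a and b in struct s, and the result
   unused, s.a |= s.b is emitted as if (s.b) s.a = 1; and s.a &= s.b as
   if (! s.b) s.a = 0; avoiding a read-modify-write of s.a.  */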
8394 if (ignore
8395 && TREE_CODE (lhs) == COMPONENT_REF
8396 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8397 || TREE_CODE (rhs) == BIT_AND_EXPR)
8398 && TREE_OPERAND (rhs, 0) == lhs
8399 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8400 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8401 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8403 rtx label = gen_label_rtx ();
8405 do_jump (TREE_OPERAND (rhs, 1),
8406 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8407 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8408 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8409 (TREE_CODE (rhs) == BIT_IOR_EXPR
8410 ? integer_one_node
8411 : integer_zero_node)),
8412 0, 0);
8413 do_pending_stack_adjust ();
8414 emit_label (label);
8415 return const0_rtx;
8418 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8419 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8420 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8421 TYPE_NONCOPIED_PARTS (lhs_type));
8423 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8424 while (noncopied_parts != 0)
8426 expand_assignment (TREE_PURPOSE (noncopied_parts),
8427 TREE_VALUE (noncopied_parts), 0, 0);
8428 noncopied_parts = TREE_CHAIN (noncopied_parts);
8430 return temp;
8433 case RETURN_EXPR:
8434 if (!TREE_OPERAND (exp, 0))
8435 expand_null_return ();
8436 else
8437 expand_return (TREE_OPERAND (exp, 0));
8438 return const0_rtx;
8440 case PREINCREMENT_EXPR:
8441 case PREDECREMENT_EXPR:
8442 return expand_increment (exp, 0, ignore);
8444 case POSTINCREMENT_EXPR:
8445 case POSTDECREMENT_EXPR:
8446 /* Faster to treat as pre-increment if result is not used. */
8447 return expand_increment (exp, ! ignore, ignore);
8449 case ADDR_EXPR:
8450 /* If nonzero, TEMP will be set to the address of something that might
8451 be a MEM corresponding to a stack slot. */
8452 temp = 0;
8454 /* Are we taking the address of a nested function? */
8455 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8456 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8457 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8458 && ! TREE_STATIC (exp))
8460 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8461 op0 = force_operand (op0, target);
8463 /* If we are taking the address of something erroneous, just
8464 return a zero. */
8465 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8466 return const0_rtx;
8467 else
8469 /* We make sure to pass const0_rtx down if we came in with
8470 ignore set, to avoid doing the cleanups twice for something. */
8471 op0 = expand_expr (TREE_OPERAND (exp, 0),
8472 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8473 (modifier == EXPAND_INITIALIZER
8474 ? modifier : EXPAND_CONST_ADDRESS));
8476 /* If we are going to ignore the result, OP0 will have been set
8477 to const0_rtx, so just return it. Don't get confused and
8478 think we are taking the address of the constant. */
8479 if (ignore)
8480 return op0;
8482 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8483 clever and return a REG when given a MEM. */
8484 op0 = protect_from_queue (op0, 1);
8486 /* We would like the object in memory. If it is a constant, we can
8487 have it be statically allocated into memory. For a non-constant,
8488 we need to allocate some memory and store the value into it. */
8490 if (CONSTANT_P (op0))
8491 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8492 op0);
8493 else if (GET_CODE (op0) == MEM)
8495 mark_temp_addr_taken (op0);
8496 temp = XEXP (op0, 0);
8499 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8500 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8501 || GET_CODE (op0) == PARALLEL)
8503 /* If this object is in a register, it must not
8504 be BLKmode. */
8505 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8506 tree nt = build_qualified_type (inner_type,
8507 (TYPE_QUALS (inner_type)
8508 | TYPE_QUAL_CONST));
8509 rtx memloc = assign_temp (nt, 1, 1, 1);
8511 mark_temp_addr_taken (memloc);
8512 if (GET_CODE (op0) == PARALLEL)
8513 /* Handle calls that pass values in multiple non-contiguous
8514 locations. The Irix 6 ABI has examples of this. */
8515 emit_group_store (memloc, op0,
8516 int_size_in_bytes (inner_type),
8517 TYPE_ALIGN (inner_type));
8518 else
8519 emit_move_insn (memloc, op0);
8520 op0 = memloc;
8523 if (GET_CODE (op0) != MEM)
8524 abort ();
8526 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8528 temp = XEXP (op0, 0);
8529 #ifdef POINTERS_EXTEND_UNSIGNED
8530 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8531 && mode == ptr_mode)
8532 temp = convert_memory_address (ptr_mode, temp);
8533 #endif
8534 return temp;
8537 op0 = force_operand (XEXP (op0, 0), target);
8540 if (flag_force_addr && GET_CODE (op0) != REG)
8541 op0 = force_reg (Pmode, op0);
8543 if (GET_CODE (op0) == REG
8544 && ! REG_USERVAR_P (op0))
8545 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8547 /* If we might have had a temp slot, add an equivalent address
8548 for it. */
8549 if (temp != 0)
8550 update_temp_slot_address (temp, op0);
8552 #ifdef POINTERS_EXTEND_UNSIGNED
8553 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8554 && mode == ptr_mode)
8555 op0 = convert_memory_address (ptr_mode, op0);
8556 #endif
8558 return op0;
8560 case ENTRY_VALUE_EXPR:
8561 abort ();
8563 /* COMPLEX type for Extended Pascal & Fortran */
8564 case COMPLEX_EXPR:
8566 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8567 rtx insns;
8569 /* Get the rtx code of the operands. */
8570 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8571 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8573 if (! target)
8574 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8576 start_sequence ();
8578 /* Move the real (op0) and imaginary (op1) parts to their location. */
8579 emit_move_insn (gen_realpart (mode, target), op0);
8580 emit_move_insn (gen_imagpart (mode, target), op1);
8582 insns = get_insns ();
8583 end_sequence ();
8585 /* Complex construction should appear as a single unit. */
8586 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8587 each with a separate pseudo as destination.
8588 It's not correct for flow to treat them as a unit. */
8589 if (GET_CODE (target) != CONCAT)
8590 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8591 else
8592 emit_insns (insns);
8594 return target;
8597 case REALPART_EXPR:
8598 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8599 return gen_realpart (mode, op0);
8601 case IMAGPART_EXPR:
8602 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8603 return gen_imagpart (mode, op0);
8605 case CONJ_EXPR:
8607 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8608 rtx imag_t;
8609 rtx insns;
8611 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8613 if (! target)
8614 target = gen_reg_rtx (mode);
8616 start_sequence ();
8618 /* Store the realpart and the negated imagpart to target. */
8619 emit_move_insn (gen_realpart (partmode, target),
8620 gen_realpart (partmode, op0));
8622 imag_t = gen_imagpart (partmode, target);
8623 temp = expand_unop (partmode,
8624 ! unsignedp && flag_trapv
8625 && (GET_MODE_CLASS(partmode) == MODE_INT)
8626 ? negv_optab : neg_optab,
8627 gen_imagpart (partmode, op0), imag_t, 0);
8628 if (temp != imag_t)
8629 emit_move_insn (imag_t, temp);
8631 insns = get_insns ();
8632 end_sequence ();
8634 /* Conjugate should appear as a single unit.
8635 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8636 each with a separate pseudo as destination.
8637 It's not correct for flow to treat them as a unit. */
8638 if (GET_CODE (target) != CONCAT)
8639 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8640 else
8641 emit_insns (insns);
8643 return target;
8646 case TRY_CATCH_EXPR:
8648 tree handler = TREE_OPERAND (exp, 1);
8650 expand_eh_region_start ();
8652 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8654 expand_eh_region_end_cleanup (handler);
8656 return op0;
8659 case TRY_FINALLY_EXPR:
8661 tree try_block = TREE_OPERAND (exp, 0);
8662 tree finally_block = TREE_OPERAND (exp, 1);
8663 rtx finally_label = gen_label_rtx ();
8664 rtx done_label = gen_label_rtx ();
8665 rtx return_link = gen_reg_rtx (Pmode);
8666 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8667 (tree) finally_label, (tree) return_link);
8668 TREE_SIDE_EFFECTS (cleanup) = 1;
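/* The code emitted below is laid out roughly as follows, in GNU C
   terms:

       <try-block>
       return_link = &&resume; goto finally;
     resume:
       goto done;
     finally:
       <finally-block>
       goto *return_link;
     done:

   so the finally block can also be entered as a subroutine from other
   exit paths, each supplying its own return address in RETURN_LINK.  */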
8670 /* Start a new binding layer that will keep track of all cleanup
8671 actions to be performed. */
8672 expand_start_bindings (2);
8674 target_temp_slot_level = temp_slot_level;
8676 expand_decl_cleanup (NULL_TREE, cleanup);
8677 op0 = expand_expr (try_block, target, tmode, modifier);
8679 preserve_temp_slots (op0);
8680 expand_end_bindings (NULL_TREE, 0, 0);
8681 emit_jump (done_label);
8682 emit_label (finally_label);
8683 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8684 emit_indirect_jump (return_link);
8685 emit_label (done_label);
8686 return op0;
8689 case GOTO_SUBROUTINE_EXPR:
8691 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8692 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8693 rtx return_address = gen_label_rtx ();
8694 emit_move_insn (return_link,
8695 gen_rtx_LABEL_REF (Pmode, return_address));
8696 emit_jump (subr);
8697 emit_label (return_address);
8698 return const0_rtx;
8701 case VA_ARG_EXPR:
8702 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8704 case EXC_PTR_EXPR:
8705 return get_exception_pointer (cfun);
8707 default:
8708 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8711 /* Here to do an ordinary binary operator, generating an instruction
8712 from the optab already placed in `this_optab'. */
8713 binop:
8714 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8715 subtarget = 0;
8716 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8717 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8718 binop2:
8719 temp = expand_binop (mode, this_optab, op0, op1, target,
8720 unsignedp, OPTAB_LIB_WIDEN);
8721 if (temp == 0)
8722 abort ();
8723 return temp;
8726 /* Similar to expand_expr, except that we don't specify a target, target
8727 mode, or modifier and we return the alignment of the inner type. This is
8728 used in cases where it is not necessary to align the result to the
8729 alignment of its type as long as we know the alignment of the result, for
8730 example for comparisons of BLKmode values. */
8732 static rtx
8733 expand_expr_unaligned (exp, palign)
8734 register tree exp;
8735 unsigned int *palign;
8737 register rtx op0;
8738 tree type = TREE_TYPE (exp);
8739 register enum machine_mode mode = TYPE_MODE (type);
8741 /* Default the alignment we return to that of the type. */
8742 *palign = TYPE_ALIGN (type);
8744 /* The only case in which we do anything special is if the resulting mode
8745 is BLKmode. */
8746 if (mode != BLKmode)
8747 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8749 switch (TREE_CODE (exp))
8751 case CONVERT_EXPR:
8752 case NOP_EXPR:
8753 case NON_LVALUE_EXPR:
8754 /* Conversions between BLKmode values don't change the underlying
8755 alignment or value. */
8756 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8757 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8758 break;
8760 case ARRAY_REF:
8761 /* Much of the code for this case is copied directly from expand_expr.
8762 We need to duplicate it here because we will do something different
8763 in the fall-through case, so we need to handle the same exceptions
8764 it does. */
8766 tree array = TREE_OPERAND (exp, 0);
8767 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8768 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8769 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8770 HOST_WIDE_INT i;
8772 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8773 abort ();
8775 /* Optimize the special-case of a zero lower bound.
8777 We convert the low_bound to sizetype to avoid some problems
8778 with constant folding. (E.g. suppose the lower bound is 1,
8779 and its mode is QI. Without the conversion, (ARRAY
8780 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8781 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8783 if (! integer_zerop (low_bound))
8784 index = size_diffop (index, convert (sizetype, low_bound));
8786 /* If this is a constant index into a constant array,
8787 just get the value from the array. Handle both the cases when
8788 we have an explicit constructor and when our operand is a variable
8789 that was declared const. */
8791 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8792 && host_integerp (index, 0)
8793 && 0 > compare_tree_int (index,
8794 list_length (CONSTRUCTOR_ELTS
8795 (TREE_OPERAND (exp, 0)))))
8797 tree elem;
8799 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8800 i = tree_low_cst (index, 0);
8801 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8804 if (elem)
8805 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8808 else if (optimize >= 1
8809 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8810 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8811 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8813 if (TREE_CODE (index) == INTEGER_CST)
8815 tree init = DECL_INITIAL (array);
8817 if (TREE_CODE (init) == CONSTRUCTOR)
8819 tree elem;
8821 for (elem = CONSTRUCTOR_ELTS (init);
8822 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8823 elem = TREE_CHAIN (elem))
8826 if (elem)
8827 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8828 palign);
8833 /* Fall through. */
8835 case COMPONENT_REF:
8836 case BIT_FIELD_REF:
8837 /* If the operand is a CONSTRUCTOR, we can just extract the
8838 appropriate field if it is present. Don't do this if we have
8839 already written the data since we want to refer to that copy
8840 and varasm.c assumes that's what we'll do. */
8841 if (TREE_CODE (exp) != ARRAY_REF
8842 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8843 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8845 tree elt;
8847 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8848 elt = TREE_CHAIN (elt))
8849 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8850 /* Note that unlike the case in expand_expr, we know this is
8851 BLKmode and hence not an integer. */
8852 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8856 enum machine_mode mode1;
8857 HOST_WIDE_INT bitsize, bitpos;
8858 tree offset;
8859 int volatilep = 0;
8860 unsigned int alignment;
8861 int unsignedp;
8862 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8863 &mode1, &unsignedp, &volatilep,
8864 &alignment);
8866 /* If we got back the original object, something is wrong. Perhaps
8867 we are evaluating an expression too early. In any event, don't
8868 infinitely recurse. */
8869 if (tem == exp)
8870 abort ();
8872 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8874 /* If this is a constant, put it into a register if it is a
8875 legitimate constant and OFFSET is 0; otherwise put it into memory. */
8876 if (CONSTANT_P (op0))
8878 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8880 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8881 && offset == 0)
8882 op0 = force_reg (inner_mode, op0);
8883 else
8884 op0 = validize_mem (force_const_mem (inner_mode, op0));
8887 if (offset != 0)
8889 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8891 /* If this object is in a register, put it into memory.
8892 This case can't occur in C, but can in Ada if we have
8893 unchecked conversion of an expression from a scalar type to
8894 an array or record type. */
8895 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8896 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8898 tree nt = build_qualified_type (TREE_TYPE (tem),
8899 (TYPE_QUALS (TREE_TYPE (tem))
8900 | TYPE_QUAL_CONST));
8901 rtx memloc = assign_temp (nt, 1, 1, 1);
8903 mark_temp_addr_taken (memloc);
8904 emit_move_insn (memloc, op0);
8905 op0 = memloc;
8908 if (GET_CODE (op0) != MEM)
8909 abort ();
8911 if (GET_MODE (offset_rtx) != ptr_mode)
8913 #ifdef POINTERS_EXTEND_UNSIGNED
8914 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8915 #else
8916 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8917 #endif
8920 op0 = change_address (op0, VOIDmode,
8921 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8922 force_reg (ptr_mode,
8923 offset_rtx)));
8926 /* Don't forget about volatility even if this is a bitfield. */
8927 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8929 op0 = copy_rtx (op0);
8930 MEM_VOLATILE_P (op0) = 1;
8933 /* Check the access. */
8934 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8936 rtx to;
8937 int size;
8939 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8940 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8942 /* Check the access right of the pointer. */
8943 in_check_memory_usage = 1;
8944 if (size > BITS_PER_UNIT)
8945 emit_library_call (chkr_check_addr_libfunc,
8946 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8947 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8948 TYPE_MODE (sizetype),
8949 GEN_INT (MEMORY_USE_RO),
8950 TYPE_MODE (integer_type_node));
8951 in_check_memory_usage = 0;
8954 /* In cases where an aligned union has an unaligned object
8955 as a field, we might be extracting a BLKmode value from
8956 an integer-mode (e.g., SImode) object. Handle this case
8957 by doing the extract into an object as wide as the field
8958 (which we know to be the width of a basic mode), then
8959 storing into memory, and changing the mode to BLKmode.
8960 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8961 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8962 if (mode1 == VOIDmode
8963 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8964 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8965 && (TYPE_ALIGN (type) > alignment
8966 || bitpos % TYPE_ALIGN (type) != 0)))
8968 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8970 if (ext_mode == BLKmode)
8972 /* In this case, BITPOS must start at a byte boundary. */
8973 if (GET_CODE (op0) != MEM
8974 || bitpos % BITS_PER_UNIT != 0)
8975 abort ();
8977 op0 = change_address (op0, VOIDmode,
8978 plus_constant (XEXP (op0, 0),
8979 bitpos / BITS_PER_UNIT));
8981 else
8983 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8984 TYPE_QUAL_CONST);
8985 rtx new = assign_temp (nt, 0, 1, 1);
8987 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8988 unsignedp, NULL_RTX, ext_mode,
8989 ext_mode, alignment,
8990 int_size_in_bytes (TREE_TYPE (tem)));
8992 /* If the result is a record type and BITSIZE is narrower than
8993 the mode of OP0, an integral mode, and this is a big endian
8994 machine, we must put the field into the high-order bits. */
8995 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8996 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8997 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8998 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8999 size_int (GET_MODE_BITSIZE
9000 (GET_MODE (op0))
9001 - bitsize),
9002 op0, 1);
9004 emit_move_insn (new, op0);
9005 op0 = copy_rtx (new);
9006 PUT_MODE (op0, BLKmode);
9009 else
9010 /* Get a reference to just this component. */
9011 op0 = change_address (op0, mode1,
9012 plus_constant (XEXP (op0, 0),
9013 (bitpos / BITS_PER_UNIT)));
9015 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9017 /* Adjust the alignment in case the bit position is not
9018 a multiple of the alignment of the inner object. */
9019 while (bitpos % alignment != 0)
9020 alignment >>= 1;
9022 if (GET_CODE (XEXP (op0, 0)) == REG)
9023 mark_reg_pointer (XEXP (op0, 0), alignment);
9025 MEM_IN_STRUCT_P (op0) = 1;
9026 MEM_VOLATILE_P (op0) |= volatilep;
9028 *palign = alignment;
9029 return op0;
9032 default:
9033 break;
9037 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9040 /* Return the tree node if ARG corresponds to a string constant, or zero
9041 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9042 in bytes within the string that ARG is accessing. The type of the
9043 offset will be `sizetype'. */
9045 tree
9046 string_constant (arg, ptr_offset)
9047 tree arg;
9048 tree *ptr_offset;
9050 STRIP_NOPS (arg);
9052 if (TREE_CODE (arg) == ADDR_EXPR
9053 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9055 *ptr_offset = size_zero_node;
9056 return TREE_OPERAND (arg, 0);
9058 else if (TREE_CODE (arg) == PLUS_EXPR)
9060 tree arg0 = TREE_OPERAND (arg, 0);
9061 tree arg1 = TREE_OPERAND (arg, 1);
9063 STRIP_NOPS (arg0);
9064 STRIP_NOPS (arg1);
9066 if (TREE_CODE (arg0) == ADDR_EXPR
9067 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9069 *ptr_offset = convert (sizetype, arg1);
9070 return TREE_OPERAND (arg0, 0);
9072 else if (TREE_CODE (arg1) == ADDR_EXPR
9073 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9075 *ptr_offset = convert (sizetype, arg0);
9076 return TREE_OPERAND (arg1, 0);
9080 return 0;
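/* A minimal illustrative sketch, not compiler code: the two source-level
   shapes that string_constant recognizes, restated as plain C.  The
   function names here are hypothetical; only the tree shapes described in
   the comment above are assumed.  */

static const char *
sketch_direct_string (void)
{
  /* "hello" decays to an ADDR_EXPR of a STRING_CST; *PTR_OFFSET is 0.  */
  return "hello";
}

static const char *
sketch_offset_string (void)
{
  /* "hello" + 2 is a PLUS_EXPR whose first operand is the ADDR_EXPR of
     the STRING_CST and whose other operand becomes *PTR_OFFSET (here 2).  */
  return "hello" + 2;
}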
9083 /* Expand code for a post- or pre- increment or decrement
9084 and return the RTX for the result.
9085 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9087 static rtx
9088 expand_increment (exp, post, ignore)
9089 register tree exp;
9090 int post, ignore;
9092 register rtx op0, op1;
9093 register rtx temp, value;
9094 register tree incremented = TREE_OPERAND (exp, 0);
9095 optab this_optab = add_optab;
9096 int icode;
9097 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9098 int op0_is_copy = 0;
9099 int single_insn = 0;
9100 /* 1 means we can't store into OP0 directly,
9101 because it is a subreg narrower than a word,
9102 and we don't dare clobber the rest of the word. */
9103 int bad_subreg = 0;
9105 /* Stabilize any component ref that might need to be
9106 evaluated more than once below. */
9107 if (!post
9108 || TREE_CODE (incremented) == BIT_FIELD_REF
9109 || (TREE_CODE (incremented) == COMPONENT_REF
9110 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9111 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9112 incremented = stabilize_reference (incremented);
9113 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9114 ones into save exprs so that they don't accidentally get evaluated
9115 more than once by the code below. */
9116 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9117 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9118 incremented = save_expr (incremented);
9120 /* Compute the operands as RTX.
9121 Note whether OP0 is the actual lvalue or a copy of it:
9122 I believe it is a copy iff it is a register or subreg
9123 and insns were generated in computing it. */
9125 temp = get_last_insn ();
9126 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9128 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9129 in place but instead must do sign- or zero-extension during assignment,
9130 so we copy it into a new register and let the code below use it as
9131 a copy.
9133 Note that we can safely modify this SUBREG since it is known not to be
9134 shared (it was made by the expand_expr call above). */
9136 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9138 if (post)
9139 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9140 else
9141 bad_subreg = 1;
9143 else if (GET_CODE (op0) == SUBREG
9144 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9146 /* We cannot increment this SUBREG in place. If we are
9147 post-incrementing, get a copy of the old value. Otherwise,
9148 just mark that we cannot increment in place. */
9149 if (post)
9150 op0 = copy_to_reg (op0);
9151 else
9152 bad_subreg = 1;
9155 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9156 && temp != get_last_insn ());
9157 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9158 EXPAND_MEMORY_USE_BAD);
9160 /* Decide whether incrementing or decrementing. */
9161 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9162 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9163 this_optab = sub_optab;
9165 /* Convert decrement by a constant into a negative increment. */
9166 if (this_optab == sub_optab
9167 && GET_CODE (op1) == CONST_INT)
9169 op1 = GEN_INT (-INTVAL (op1));
9170 this_optab = add_optab;
9173 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9174 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9176 /* For a preincrement, see if we can do this with a single instruction. */
9177 if (!post)
9179 icode = (int) this_optab->handlers[(int) mode].insn_code;
9180 if (icode != (int) CODE_FOR_nothing
9181 /* Make sure that OP0 is valid for operands 0 and 1
9182 of the insn we want to queue. */
9183 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9184 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9185 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9186 single_insn = 1;
9189 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9190 then we cannot just increment OP0. We must therefore contrive to
9191 increment the original value. Then, for postincrement, we can return
9192 OP0 since it is a copy of the old value. For preincrement, expand here
9193 unless we can do it with a single insn.
9195 Likewise if storing directly into OP0 would clobber high bits
9196 we need to preserve (bad_subreg). */
9197 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9199 /* This is the easiest way to increment the value wherever it is.
9200 Problems with multiple evaluation of INCREMENTED are prevented
9201 because either (1) it is a component_ref or preincrement,
9202 in which case it was stabilized above, or (2) it is an array_ref
9203 with constant index in an array in a register, which is
9204 safe to reevaluate. */
9205 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9206 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9207 ? MINUS_EXPR : PLUS_EXPR),
9208 TREE_TYPE (exp),
9209 incremented,
9210 TREE_OPERAND (exp, 1));
9212 while (TREE_CODE (incremented) == NOP_EXPR
9213 || TREE_CODE (incremented) == CONVERT_EXPR)
9215 newexp = convert (TREE_TYPE (incremented), newexp);
9216 incremented = TREE_OPERAND (incremented, 0);
9219 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9220 return post ? op0 : temp;
9223 if (post)
9225 /* We have a true reference to the value in OP0.
9226 If there is an insn to add or subtract in this mode, queue it.
9227 Queueing the increment insn avoids the register shuffling
9228 that often results if we must increment now and first save
9229 the old value for subsequent use. */
9231 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9232 op0 = stabilize (op0);
9233 #endif
9235 icode = (int) this_optab->handlers[(int) mode].insn_code;
9236 if (icode != (int) CODE_FOR_nothing
9237 /* Make sure that OP0 is valid for operands 0 and 1
9238 of the insn we want to queue. */
9239 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9240 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9242 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9243 op1 = force_reg (mode, op1);
9245 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9247 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9249 rtx addr = (general_operand (XEXP (op0, 0), mode)
9250 ? force_reg (Pmode, XEXP (op0, 0))
9251 : copy_to_reg (XEXP (op0, 0)));
9252 rtx temp, result;
9254 op0 = change_address (op0, VOIDmode, addr);
9255 temp = force_reg (GET_MODE (op0), op0);
9256 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9257 op1 = force_reg (mode, op1);
9259 /* The increment queue is LIFO, so we have to `queue'
9260 the instructions in reverse order. */
9261 enqueue_insn (op0, gen_move_insn (op0, temp));
9262 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9263 return result;
9267 /* Preincrement, or we can't increment with one simple insn. */
9268 if (post)
9269 /* Save a copy of the value before inc or dec, to return it later. */
9270 temp = value = copy_to_reg (op0);
9271 else
9272 /* Arrange to return the incremented value. */
9273 /* Copy the rtx because expand_binop will protect from the queue,
9274 and the results of that would be invalid for us to return
9275 if our caller does emit_queue before using our result. */
9276 temp = copy_rtx (value = op0);
9278 /* Increment however we can. */
9279 op1 = expand_binop (mode, this_optab, value, op1,
9280 current_function_check_memory_usage ? NULL_RTX : op0,
9281 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9282 /* Make sure the value is stored into OP0. */
9283 if (op1 != op0)
9284 emit_move_insn (op0, op1);
9286 return temp;
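/* A minimal illustrative sketch, not compiler code: the value/side-effect
   split that expand_increment must preserve, written as plain C.  POST
   corresponds to p++ (return the saved old value), !POST to ++p (return
   the new value).  Function names are hypothetical.  */

static int
sketch_post_increment (int *p)
{
  int old = *p;        /* copy of the old value, as in "temp = value = copy_to_reg (op0)" */
  *p = *p + 1;         /* the queued or expanded add */
  return old;          /* postincrement yields the value before the add */
}

static int
sketch_pre_increment (int *p)
{
  *p = *p + 1;         /* add first */
  return *p;           /* preincrement yields the incremented value */
}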
9289 /* At the start of a function, record that we have no previously-pushed
9290 arguments waiting to be popped. */
9292 void
9293 init_pending_stack_adjust ()
9295 pending_stack_adjust = 0;
9298 /* When exiting from a function, if safe, clear out any pending stack adjust
9299 so the adjustment won't get done.
9301 Note, if the current function calls alloca, then it must have a
9302 frame pointer regardless of the value of flag_omit_frame_pointer. */
9304 void
9305 clear_pending_stack_adjust ()
9307 #ifdef EXIT_IGNORE_STACK
9308 if (optimize > 0
9309 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9310 && EXIT_IGNORE_STACK
9311 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9312 && ! flag_inline_functions)
9314 stack_pointer_delta -= pending_stack_adjust,
9315 pending_stack_adjust = 0;
9317 #endif
9320 /* Pop any previously-pushed arguments that have not been popped yet. */
9322 void
9323 do_pending_stack_adjust ()
9325 if (inhibit_defer_pop == 0)
9327 if (pending_stack_adjust != 0)
9328 adjust_stack (GEN_INT (pending_stack_adjust));
9329 pending_stack_adjust = 0;
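/* A minimal illustrative sketch, not compiler code: the bookkeeping idea
   behind pending_stack_adjust, modeled with a plain counter.  All names
   are hypothetical; emit_adjust stands in for adjust_stack.  */

static int sketch_pending;      /* bytes of argument space pushed but not yet popped */

static void
sketch_note_pop (int bytes)
{
  sketch_pending += bytes;      /* defer the pop instead of emitting it now */
}

static void
sketch_flush_pops (void (*emit_adjust) (int))
{
  if (sketch_pending != 0)
    emit_adjust (sketch_pending);   /* one combined stack adjustment */
  sketch_pending = 0;
}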
9333 /* Expand conditional expressions. */
9335 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9336 LABEL is an rtx of code CODE_LABEL, in this function and all the
9337 functions here. */
9339 void
9340 jumpifnot (exp, label)
9341 tree exp;
9342 rtx label;
9344 do_jump (exp, label, NULL_RTX);
9347 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9349 void
9350 jumpif (exp, label)
9351 tree exp;
9352 rtx label;
9354 do_jump (exp, NULL_RTX, label);
9357 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9358 the result is zero, or IF_TRUE_LABEL if the result is one.
9359 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9360 meaning fall through in that case.
9362 do_jump always does any pending stack adjust except when it does not
9363 actually perform a jump. An example where there is no jump
9364 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9366 This function is responsible for optimizing cases such as
9367 &&, || and comparison operators in EXP. */
9369 void
9370 do_jump (exp, if_false_label, if_true_label)
9371 tree exp;
9372 rtx if_false_label, if_true_label;
9374 register enum tree_code code = TREE_CODE (exp);
9375 /* Some cases need to create a label to jump to
9376 in order to properly fall through.
9377 These cases set DROP_THROUGH_LABEL nonzero. */
9378 rtx drop_through_label = 0;
9379 rtx temp;
9380 int i;
9381 tree type;
9382 enum machine_mode mode;
9384 #ifdef MAX_INTEGER_COMPUTATION_MODE
9385 check_max_integer_computation_mode (exp);
9386 #endif
9388 emit_queue ();
9390 switch (code)
9392 case ERROR_MARK:
9393 break;
9395 case INTEGER_CST:
9396 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9397 if (temp)
9398 emit_jump (temp);
9399 break;
9401 #if 0
9402 /* This is not true with #pragma weak */
9403 case ADDR_EXPR:
9404 /* The address of something can never be zero. */
9405 if (if_true_label)
9406 emit_jump (if_true_label);
9407 break;
9408 #endif
9410 case NOP_EXPR:
9411 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9412 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9413 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9414 goto normal;
9415 case CONVERT_EXPR:
9416 /* If we are narrowing the operand, we have to do the compare in the
9417 narrower mode. */
9418 if ((TYPE_PRECISION (TREE_TYPE (exp))
9419 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9420 goto normal;
9421 case NON_LVALUE_EXPR:
9422 case REFERENCE_EXPR:
9423 case ABS_EXPR:
9424 case NEGATE_EXPR:
9425 case LROTATE_EXPR:
9426 case RROTATE_EXPR:
9427 /* These cannot change zero->non-zero or vice versa. */
9428 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9429 break;
9431 case WITH_RECORD_EXPR:
9432 /* Put the object on the placeholder list, recurse through our first
9433 operand, and pop the list. */
9434 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9435 placeholder_list);
9436 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9437 placeholder_list = TREE_CHAIN (placeholder_list);
9438 break;
9440 #if 0
9441 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9442 a test, and can be longer if the test is eliminated. */
9443 case PLUS_EXPR:
9444 /* Reduce to minus. */
9445 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9446 TREE_OPERAND (exp, 0),
9447 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9448 TREE_OPERAND (exp, 1))));
9449 /* Process as MINUS. */
9450 #endif
9452 case MINUS_EXPR:
9453 /* Non-zero iff operands of minus differ. */
9454 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9455 TREE_OPERAND (exp, 0),
9456 TREE_OPERAND (exp, 1)),
9457 NE, NE, if_false_label, if_true_label);
9458 break;
9460 case BIT_AND_EXPR:
9461 /* If we are AND'ing with a small constant, do this comparison in the
9462 smallest type that fits. If the machine doesn't have comparisons
9463 that small, it will be converted back to the wider comparison.
9464 This helps if we are testing the sign bit of a narrower object.
9465 combine can't do this for us because it can't know whether a
9466 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9468 if (! SLOW_BYTE_ACCESS
9469 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9470 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9471 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9472 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9473 && (type = type_for_mode (mode, 1)) != 0
9474 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9475 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9476 != CODE_FOR_nothing))
9478 do_jump (convert (type, exp), if_false_label, if_true_label);
9479 break;
9481 goto normal;
9483 case TRUTH_NOT_EXPR:
9484 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9485 break;
9487 case TRUTH_ANDIF_EXPR:
9488 if (if_false_label == 0)
9489 if_false_label = drop_through_label = gen_label_rtx ();
9490 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9491 start_cleanup_deferral ();
9492 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9493 end_cleanup_deferral ();
9494 break;
9496 case TRUTH_ORIF_EXPR:
9497 if (if_true_label == 0)
9498 if_true_label = drop_through_label = gen_label_rtx ();
9499 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9500 start_cleanup_deferral ();
9501 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9502 end_cleanup_deferral ();
9503 break;
9505 case COMPOUND_EXPR:
9506 push_temp_slots ();
9507 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9508 preserve_temp_slots (NULL_RTX);
9509 free_temp_slots ();
9510 pop_temp_slots ();
9511 emit_queue ();
9512 do_pending_stack_adjust ();
9513 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9514 break;
9516 case COMPONENT_REF:
9517 case BIT_FIELD_REF:
9518 case ARRAY_REF:
9520 HOST_WIDE_INT bitsize, bitpos;
9521 int unsignedp;
9522 enum machine_mode mode;
9523 tree type;
9524 tree offset;
9525 int volatilep = 0;
9526 unsigned int alignment;
9528 /* Get a description of this reference. We don't actually care
9529 about the underlying object here. */
9530 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9531 &unsignedp, &volatilep, &alignment);
9533 type = type_for_size (bitsize, unsignedp);
9534 if (! SLOW_BYTE_ACCESS
9535 && type != 0 && bitsize >= 0
9536 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9537 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9538 != CODE_FOR_nothing))
9540 do_jump (convert (type, exp), if_false_label, if_true_label);
9541 break;
9543 goto normal;
9546 case COND_EXPR:
9547 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9548 if (integer_onep (TREE_OPERAND (exp, 1))
9549 && integer_zerop (TREE_OPERAND (exp, 2)))
9550 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9552 else if (integer_zerop (TREE_OPERAND (exp, 1))
9553 && integer_onep (TREE_OPERAND (exp, 2)))
9554 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9556 else
9558 register rtx label1 = gen_label_rtx ();
9559 drop_through_label = gen_label_rtx ();
9561 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9563 start_cleanup_deferral ();
9564 /* Now the THEN-expression. */
9565 do_jump (TREE_OPERAND (exp, 1),
9566 if_false_label ? if_false_label : drop_through_label,
9567 if_true_label ? if_true_label : drop_through_label);
9568 /* In case the do_jump just above never jumps. */
9569 do_pending_stack_adjust ();
9570 emit_label (label1);
9572 /* Now the ELSE-expression. */
9573 do_jump (TREE_OPERAND (exp, 2),
9574 if_false_label ? if_false_label : drop_through_label,
9575 if_true_label ? if_true_label : drop_through_label);
9576 end_cleanup_deferral ();
9578 break;
9580 case EQ_EXPR:
9582 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9584 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9585 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9587 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9588 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9589 do_jump
9590 (fold
9591 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9592 fold (build (EQ_EXPR, TREE_TYPE (exp),
9593 fold (build1 (REALPART_EXPR,
9594 TREE_TYPE (inner_type),
9595 exp0)),
9596 fold (build1 (REALPART_EXPR,
9597 TREE_TYPE (inner_type),
9598 exp1)))),
9599 fold (build (EQ_EXPR, TREE_TYPE (exp),
9600 fold (build1 (IMAGPART_EXPR,
9601 TREE_TYPE (inner_type),
9602 exp0)),
9603 fold (build1 (IMAGPART_EXPR,
9604 TREE_TYPE (inner_type),
9605 exp1)))))),
9606 if_false_label, if_true_label);
9609 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9610 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9612 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9613 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9614 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9615 else
9616 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9617 break;
9620 case NE_EXPR:
9622 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9624 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9625 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9627 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9628 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9629 do_jump
9630 (fold
9631 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9632 fold (build (NE_EXPR, TREE_TYPE (exp),
9633 fold (build1 (REALPART_EXPR,
9634 TREE_TYPE (inner_type),
9635 exp0)),
9636 fold (build1 (REALPART_EXPR,
9637 TREE_TYPE (inner_type),
9638 exp1)))),
9639 fold (build (NE_EXPR, TREE_TYPE (exp),
9640 fold (build1 (IMAGPART_EXPR,
9641 TREE_TYPE (inner_type),
9642 exp0)),
9643 fold (build1 (IMAGPART_EXPR,
9644 TREE_TYPE (inner_type),
9645 exp1)))))),
9646 if_false_label, if_true_label);
9649 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9650 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9652 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9653 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9654 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9655 else
9656 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9657 break;
9660 case LT_EXPR:
9661 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9662 if (GET_MODE_CLASS (mode) == MODE_INT
9663 && ! can_compare_p (LT, mode, ccp_jump))
9664 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9665 else
9666 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9667 break;
9669 case LE_EXPR:
9670 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9671 if (GET_MODE_CLASS (mode) == MODE_INT
9672 && ! can_compare_p (LE, mode, ccp_jump))
9673 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9674 else
9675 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9676 break;
9678 case GT_EXPR:
9679 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9680 if (GET_MODE_CLASS (mode) == MODE_INT
9681 && ! can_compare_p (GT, mode, ccp_jump))
9682 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9683 else
9684 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9685 break;
9687 case GE_EXPR:
9688 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9689 if (GET_MODE_CLASS (mode) == MODE_INT
9690 && ! can_compare_p (GE, mode, ccp_jump))
9691 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9692 else
9693 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9694 break;
9696 case UNORDERED_EXPR:
9697 case ORDERED_EXPR:
9699 enum rtx_code cmp, rcmp;
9700 int do_rev;
9702 if (code == UNORDERED_EXPR)
9703 cmp = UNORDERED, rcmp = ORDERED;
9704 else
9705 cmp = ORDERED, rcmp = UNORDERED;
9706 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9708 do_rev = 0;
9709 if (! can_compare_p (cmp, mode, ccp_jump)
9710 && (can_compare_p (rcmp, mode, ccp_jump)
9711 /* If the target doesn't provide either UNORDERED or ORDERED
9712 comparisons, canonicalize on UNORDERED for the library. */
9713 || rcmp == UNORDERED))
9714 do_rev = 1;
9716 if (! do_rev)
9717 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9718 else
9719 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9721 break;
9724 enum rtx_code rcode1;
9725 enum tree_code tcode2;
9727 case UNLT_EXPR:
9728 rcode1 = UNLT;
9729 tcode2 = LT_EXPR;
9730 goto unordered_bcc;
9731 case UNLE_EXPR:
9732 rcode1 = UNLE;
9733 tcode2 = LE_EXPR;
9734 goto unordered_bcc;
9735 case UNGT_EXPR:
9736 rcode1 = UNGT;
9737 tcode2 = GT_EXPR;
9738 goto unordered_bcc;
9739 case UNGE_EXPR:
9740 rcode1 = UNGE;
9741 tcode2 = GE_EXPR;
9742 goto unordered_bcc;
9743 case UNEQ_EXPR:
9744 rcode1 = UNEQ;
9745 tcode2 = EQ_EXPR;
9746 goto unordered_bcc;
9748 unordered_bcc:
9749 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9750 if (can_compare_p (rcode1, mode, ccp_jump))
9751 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9752 if_true_label);
9753 else
9755 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9756 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9757 tree cmp0, cmp1;
9759 /* If the target doesn't support combined unordered
9760 compares, decompose into UNORDERED + comparison. */
9761 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9762 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9763 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9764 do_jump (exp, if_false_label, if_true_label);
9767 break;
9769 default:
9770 normal:
9771 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9772 #if 0
9773 /* This is not needed any more and causes poor code since it causes
9774 comparisons and tests from non-SI objects to have different code
9775 sequences. */
9776 /* Copy to register to avoid generating bad insns by cse
9777 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9778 if (!cse_not_expected && GET_CODE (temp) == MEM)
9779 temp = copy_to_reg (temp);
9780 #endif
9781 do_pending_stack_adjust ();
9782 /* Do any postincrements in the expression that was tested. */
9783 emit_queue ();
9785 if (GET_CODE (temp) == CONST_INT
9786 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9787 || GET_CODE (temp) == LABEL_REF)
9789 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9790 if (target)
9791 emit_jump (target);
9793 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9794 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9795 /* Note swapping the labels gives us not-equal. */
9796 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9797 else if (GET_MODE (temp) != VOIDmode)
9798 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9799 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9800 GET_MODE (temp), NULL_RTX, 0,
9801 if_false_label, if_true_label);
9802 else
9803 abort ();
9806 if (drop_through_label)
9808 /* If do_jump produces code that might be jumped around,
9809 do any stack adjusts from that code, before the place
9810 where control merges in. */
9811 do_pending_stack_adjust ();
9812 emit_label (drop_through_label);
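/* A minimal illustrative sketch, not compiler code: the control-flow shape
   do_jump produces for TRUTH_ANDIF_EXPR (a && b), written as goto-level C.
   The first operand jumps straight to the false label; the second operand
   decides the result.  Function and label names are hypothetical.  */

static void
sketch_jump_andif (int a, int b, void (*on_true) (void), void (*on_false) (void))
{
  if (!a)
    goto if_false;     /* first operand: any false value decides the result */
  if (!b)
    goto if_false;     /* second operand evaluated only when the first was true */
  on_true ();
  return;
 if_false:
  on_false ();
}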
9816 /* Given a comparison expression EXP for values too wide to be compared
9817 with one insn, test the comparison and jump to the appropriate label.
9818 The code of EXP is ignored; we always test GT if SWAP is 0,
9819 and LT if SWAP is 1. */
9821 static void
9822 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9823 tree exp;
9824 int swap;
9825 rtx if_false_label, if_true_label;
9827 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9828 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9829 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9830 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9832 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9835 /* Compare OP0 with OP1, word at a time, in mode MODE.
9836 UNSIGNEDP says to do unsigned comparison.
9837 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9839 void
9840 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9841 enum machine_mode mode;
9842 int unsignedp;
9843 rtx op0, op1;
9844 rtx if_false_label, if_true_label;
9846 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9847 rtx drop_through_label = 0;
9848 int i;
9850 if (! if_true_label || ! if_false_label)
9851 drop_through_label = gen_label_rtx ();
9852 if (! if_true_label)
9853 if_true_label = drop_through_label;
9854 if (! if_false_label)
9855 if_false_label = drop_through_label;
9857 /* Compare a word at a time, high order first. */
9858 for (i = 0; i < nwords; i++)
9860 rtx op0_word, op1_word;
9862 if (WORDS_BIG_ENDIAN)
9864 op0_word = operand_subword_force (op0, i, mode);
9865 op1_word = operand_subword_force (op1, i, mode);
9867 else
9869 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9870 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9873 /* All but the high-order word must be compared as unsigned. */
9874 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9875 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9876 NULL_RTX, if_true_label);
9878 /* Consider lower words only if these are equal. */
9879 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9880 NULL_RTX, 0, NULL_RTX, if_false_label);
9883 if (if_false_label)
9884 emit_jump (if_false_label);
9885 if (drop_through_label)
9886 emit_label (drop_through_label);
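/* A minimal illustrative sketch, not compiler code: the word-at-a-time ">"
   test emitted above, restated on plain arrays of words with the most
   significant word first.  Unsigned throughout, whereas the real code
   treats only the highest word as possibly signed.  The function name is
   hypothetical.  */

static int
sketch_wide_gt (const unsigned long *op0, const unsigned long *op1, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (op0[i] > op1[i])
	return 1;               /* decided: greater at this word */
      if (op0[i] != op1[i])
	return 0;               /* decided: less at this word */
      /* words equal: the lower words settle it */
    }
  return 0;                     /* all words equal: not greater */
}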
9889 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9890 with one insn, test the comparison and jump to the appropriate label. */
9892 static void
9893 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9894 tree exp;
9895 rtx if_false_label, if_true_label;
9897 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9898 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9899 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9900 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9901 int i;
9902 rtx drop_through_label = 0;
9904 if (! if_false_label)
9905 drop_through_label = if_false_label = gen_label_rtx ();
9907 for (i = 0; i < nwords; i++)
9908 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9909 operand_subword_force (op1, i, mode),
9910 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9911 word_mode, NULL_RTX, 0, if_false_label,
9912 NULL_RTX);
9914 if (if_true_label)
9915 emit_jump (if_true_label);
9916 if (drop_through_label)
9917 emit_label (drop_through_label);
9920 /* Jump according to whether OP0 is 0.
9921 We assume that OP0 has an integer mode that is too wide
9922 for the available compare insns. */
9924 void
9925 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9926 rtx op0;
9927 rtx if_false_label, if_true_label;
9929 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9930 rtx part;
9931 int i;
9932 rtx drop_through_label = 0;
9934 /* The fastest way of doing this comparison on almost any machine is to
9935 "or" all the words and compare the result. If all have to be loaded
9936 from memory and this is a very wide item, it's possible this may
9937 be slower, but that's highly unlikely. */
9939 part = gen_reg_rtx (word_mode);
9940 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9941 for (i = 1; i < nwords && part != 0; i++)
9942 part = expand_binop (word_mode, ior_optab, part,
9943 operand_subword_force (op0, i, GET_MODE (op0)),
9944 part, 1, OPTAB_WIDEN);
9946 if (part != 0)
9948 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9949 NULL_RTX, 0, if_false_label, if_true_label);
9951 return;
9954 /* If we couldn't do the "or" simply, do this with a series of compares. */
9955 if (! if_false_label)
9956 drop_through_label = if_false_label = gen_label_rtx ();
9958 for (i = 0; i < nwords; i++)
9959 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9960 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9961 if_false_label, NULL_RTX);
9963 if (if_true_label)
9964 emit_jump (if_true_label);
9966 if (drop_through_label)
9967 emit_label (drop_through_label);
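/* A minimal illustrative sketch, not compiler code: the "or all the words"
   zero test described above, on a plain array of words.  The function name
   is hypothetical.  */

static int
sketch_wide_is_zero (const unsigned long *op0, int nwords)
{
  unsigned long acc = op0[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= op0[i];              /* one cheap IOR per additional word */

  return acc == 0;              /* a single comparison decides the jump */
}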
9970 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9971 (including code to compute the values to be compared)
9972 and set (CC0) according to the result.
9973 The decision as to signed or unsigned comparison must be made by the caller.
9975 We force a stack adjustment unless there are currently
9976 things pushed on the stack that aren't yet used.
9978 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9979 compared.
9981 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9982 size of MODE should be used. */
9984 rtx
9985 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9986 register rtx op0, op1;
9987 enum rtx_code code;
9988 int unsignedp;
9989 enum machine_mode mode;
9990 rtx size;
9991 unsigned int align;
9993 rtx tem;
9995 /* If one operand is constant, make it the second one. Only do this
9996 if the other operand is not constant as well. */
9998 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9999 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10001 tem = op0;
10002 op0 = op1;
10003 op1 = tem;
10004 code = swap_condition (code);
10007 if (flag_force_mem)
10009 op0 = force_not_mem (op0);
10010 op1 = force_not_mem (op1);
10013 do_pending_stack_adjust ();
10015 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10016 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10017 return tem;
10019 #if 0
10020 /* There's no need to do this now that combine.c can eliminate lots of
10021 sign extensions. This can be less efficient in certain cases on other
10022 machines. */
10024 /* If this is a signed equality comparison, we can do it as an
10025 unsigned comparison since zero-extension is cheaper than sign
10026 extension and comparisons with zero are done as unsigned. This is
10027 the case even on machines that can do fast sign extension, since
10028 zero-extension is easier to combine with other operations than
10029 sign-extension is. If we are comparing against a constant, we must
10030 convert it to what it would look like unsigned. */
10031 if ((code == EQ || code == NE) && ! unsignedp
10032 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10034 if (GET_CODE (op1) == CONST_INT
10035 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10036 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10037 unsignedp = 1;
10039 #endif
10041 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10043 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10046 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10047 The decision as to signed or unsigned comparison must be made by the caller.
10049 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10050 compared.
10052 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10053 size of MODE should be used. */
10055 void
10056 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10057 if_false_label, if_true_label)
10058 register rtx op0, op1;
10059 enum rtx_code code;
10060 int unsignedp;
10061 enum machine_mode mode;
10062 rtx size;
10063 unsigned int align;
10064 rtx if_false_label, if_true_label;
10066 rtx tem;
10067 int dummy_true_label = 0;
10069 /* Reverse the comparison if that is safe and we want to jump if it is
10070 false. */
10071 if (! if_true_label && ! FLOAT_MODE_P (mode))
10073 if_true_label = if_false_label;
10074 if_false_label = 0;
10075 code = reverse_condition (code);
10078 /* If one operand is constant, make it the second one. Only do this
10079 if the other operand is not constant as well. */
10081 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10082 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10084 tem = op0;
10085 op0 = op1;
10086 op1 = tem;
10087 code = swap_condition (code);
10090 if (flag_force_mem)
10092 op0 = force_not_mem (op0);
10093 op1 = force_not_mem (op1);
10096 do_pending_stack_adjust ();
10098 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10099 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10101 if (tem == const_true_rtx)
10103 if (if_true_label)
10104 emit_jump (if_true_label);
10106 else
10108 if (if_false_label)
10109 emit_jump (if_false_label);
10111 return;
10114 #if 0
10115 /* There's no need to do this now that combine.c can eliminate lots of
10116 sign extensions. This can be less efficient in certain cases on other
10117 machines. */
10119 /* If this is a signed equality comparison, we can do it as an
10120 unsigned comparison since zero-extension is cheaper than sign
10121 extension and comparisons with zero are done as unsigned. This is
10122 the case even on machines that can do fast sign extension, since
10123 zero-extension is easier to combine with other operations than
10124 sign-extension is. If we are comparing against a constant, we must
10125 convert it to what it would look like unsigned. */
10126 if ((code == EQ || code == NE) && ! unsignedp
10127 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10129 if (GET_CODE (op1) == CONST_INT
10130 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10131 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10132 unsignedp = 1;
10134 #endif
10136 if (! if_true_label)
10138 dummy_true_label = 1;
10139 if_true_label = gen_label_rtx ();
10142 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10143 if_true_label);
10145 if (if_false_label)
10146 emit_jump (if_false_label);
10147 if (dummy_true_label)
10148 emit_label (if_true_label);
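/* A minimal illustrative sketch, not compiler code: why the label-swapping
   reversal at the top of do_compare_rtx_and_jump is restricted to
   non-floating modes.  With a NaN operand the reversed condition is not
   equivalent, as these two hypothetical functions show.  */

static int
sketch_not_less (double a, double b)
{
  return !(a < b);              /* true when A or B is a NaN */
}

static int
sketch_greater_equal (double a, double b)
{
  return a >= b;                /* false when A or B is a NaN */
}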
10151 /* Generate code for a comparison expression EXP (including code to compute
10152 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10153 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10154 generated code will drop through.
10155 SIGNED_CODE should be the rtx operation for this comparison for
10156 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10158 We force a stack adjustment unless there are currently
10159 things pushed on the stack that aren't yet used. */
10161 static void
10162 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10163 if_true_label)
10164 register tree exp;
10165 enum rtx_code signed_code, unsigned_code;
10166 rtx if_false_label, if_true_label;
10168 unsigned int align0, align1;
10169 register rtx op0, op1;
10170 register tree type;
10171 register enum machine_mode mode;
10172 int unsignedp;
10173 enum rtx_code code;
10175 /* Don't crash if the comparison was erroneous. */
10176 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10177 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10178 return;
10180 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10181 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10182 return;
10184 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10185 mode = TYPE_MODE (type);
10186 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10187 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10188 || (GET_MODE_BITSIZE (mode)
10189 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10190 1)))))))
10192 /* op0 might have been replaced by a promoted constant, in which
10193 case the type of the second argument should be used. */
10194 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10195 mode = TYPE_MODE (type);
10197 unsignedp = TREE_UNSIGNED (type);
10198 code = unsignedp ? unsigned_code : signed_code;
10200 #ifdef HAVE_canonicalize_funcptr_for_compare
10201 /* If function pointers need to be "canonicalized" before they can
10202 be reliably compared, then canonicalize them. */
10203 if (HAVE_canonicalize_funcptr_for_compare
10204 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10205 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10206 == FUNCTION_TYPE))
10208 rtx new_op0 = gen_reg_rtx (mode);
10210 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10211 op0 = new_op0;
10214 if (HAVE_canonicalize_funcptr_for_compare
10215 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10216 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10217 == FUNCTION_TYPE))
10219 rtx new_op1 = gen_reg_rtx (mode);
10221 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10222 op1 = new_op1;
10224 #endif
10226 /* Do any postincrements in the expression that was tested. */
10227 emit_queue ();
10229 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10230 ((mode == BLKmode)
10231 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10232 MIN (align0, align1),
10233 if_false_label, if_true_label);
10236 /* Generate code to calculate EXP using a store-flag instruction
10237 and return an rtx for the result. EXP is either a comparison
10238 or a TRUTH_NOT_EXPR whose operand is a comparison.
10240 If TARGET is nonzero, store the result there if convenient.
10242 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10243 cheap.
10245 Return zero if there is no suitable set-flag instruction
10246 available on this machine.
10248 Once expand_expr has been called on the arguments of the comparison,
10249 we are committed to doing the store flag, since it is not safe to
10250 re-evaluate the expression. We emit the store-flag insn by calling
10251 emit_store_flag, but only expand the arguments if we have a reason
10252 to believe that emit_store_flag will be successful. If we think that
10253 it will, but it isn't, we have to simulate the store-flag with a
10254 set/jump/set sequence. */
10256 static rtx
10257 do_store_flag (exp, target, mode, only_cheap)
10258 tree exp;
10259 rtx target;
10260 enum machine_mode mode;
10261 int only_cheap;
10263 enum rtx_code code;
10264 tree arg0, arg1, type;
10265 tree tem;
10266 enum machine_mode operand_mode;
10267 int invert = 0;
10268 int unsignedp;
10269 rtx op0, op1;
10270 enum insn_code icode;
10271 rtx subtarget = target;
10272 rtx result, label;
10274 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10275 result at the end. We can't simply invert the test since it would
10276 have already been inverted if it were valid. This case occurs for
10277 some floating-point comparisons. */
10279 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10280 invert = 1, exp = TREE_OPERAND (exp, 0);
10282 arg0 = TREE_OPERAND (exp, 0);
10283 arg1 = TREE_OPERAND (exp, 1);
10285 /* Don't crash if the comparison was erroneous. */
10286 if (arg0 == error_mark_node || arg1 == error_mark_node)
10287 return const0_rtx;
10289 type = TREE_TYPE (arg0);
10290 operand_mode = TYPE_MODE (type);
10291 unsignedp = TREE_UNSIGNED (type);
10293 /* We won't bother with BLKmode store-flag operations because it would mean
10294 passing a lot of information to emit_store_flag. */
10295 if (operand_mode == BLKmode)
10296 return 0;
10298 /* We won't bother with store-flag operations involving function pointers
10299 when function pointers must be canonicalized before comparisons. */
10300 #ifdef HAVE_canonicalize_funcptr_for_compare
10301 if (HAVE_canonicalize_funcptr_for_compare
10302 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10303 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10304 == FUNCTION_TYPE))
10305 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10306 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10307 == FUNCTION_TYPE))))
10308 return 0;
10309 #endif
10311 STRIP_NOPS (arg0);
10312 STRIP_NOPS (arg1);
10314 /* Get the rtx comparison code to use. We know that EXP is a comparison
10315 operation of some type. Some comparisons against 1 and -1 can be
10316 converted to comparisons with zero. Do so here so that the tests
10317 below will be aware that we have a comparison with zero. These
10318 tests will not catch constants in the first operand, but constants
10319 are rarely passed as the first operand. */
10321 switch (TREE_CODE (exp))
10323 case EQ_EXPR:
10324 code = EQ;
10325 break;
10326 case NE_EXPR:
10327 code = NE;
10328 break;
10329 case LT_EXPR:
10330 if (integer_onep (arg1))
10331 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10332 else
10333 code = unsignedp ? LTU : LT;
10334 break;
10335 case LE_EXPR:
10336 if (! unsignedp && integer_all_onesp (arg1))
10337 arg1 = integer_zero_node, code = LT;
10338 else
10339 code = unsignedp ? LEU : LE;
10340 break;
10341 case GT_EXPR:
10342 if (! unsignedp && integer_all_onesp (arg1))
10343 arg1 = integer_zero_node, code = GE;
10344 else
10345 code = unsignedp ? GTU : GT;
10346 break;
10347 case GE_EXPR:
10348 if (integer_onep (arg1))
10349 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10350 else
10351 code = unsignedp ? GEU : GE;
10352 break;
10354 case UNORDERED_EXPR:
10355 code = UNORDERED;
10356 break;
10357 case ORDERED_EXPR:
10358 code = ORDERED;
10359 break;
10360 case UNLT_EXPR:
10361 code = UNLT;
10362 break;
10363 case UNLE_EXPR:
10364 code = UNLE;
10365 break;
10366 case UNGT_EXPR:
10367 code = UNGT;
10368 break;
10369 case UNGE_EXPR:
10370 code = UNGE;
10371 break;
10372 case UNEQ_EXPR:
10373 code = UNEQ;
10374 break;
10376 default:
10377 abort ();
10380 /* Put a constant second. */
10381 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10383 tem = arg0; arg0 = arg1; arg1 = tem;
10384 code = swap_condition (code);
10387 /* If this is an equality or inequality test of a single bit, we can
10388 do this by shifting the bit being tested to the low-order bit and
10389 masking the result with the constant 1. If the condition was EQ,
10390 we xor it with 1. This does not require an scc insn and is faster
10391 than an scc insn even if we have it. */
10393 if ((code == NE || code == EQ)
10394 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10395 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10397 tree inner = TREE_OPERAND (arg0, 0);
10398 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10399 int ops_unsignedp;
10401 /* If INNER is a right shift by a constant and that constant plus BITNUM
10402 does not overflow, adjust BITNUM and INNER. */
10404 if (TREE_CODE (inner) == RSHIFT_EXPR
10405 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10406 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10407 && bitnum < TYPE_PRECISION (type)
10408 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10409 bitnum - TYPE_PRECISION (type)))
10411 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10412 inner = TREE_OPERAND (inner, 0);
10415 /* If we are going to be able to omit the AND below, we must do our
10416 operations as unsigned. If we must use the AND, we have a choice.
10417 Normally unsigned is faster, but for some machines signed is. */
10418 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10419 #ifdef LOAD_EXTEND_OP
10420 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10421 #else
10423 #endif
10426 if (! get_subtarget (subtarget)
10427 || GET_MODE (subtarget) != operand_mode
10428 || ! safe_from_p (subtarget, inner, 1))
10429 subtarget = 0;
10431 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10433 if (bitnum != 0)
10434 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10435 size_int (bitnum), subtarget, ops_unsignedp);
10437 if (GET_MODE (op0) != mode)
10438 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10440 if ((code == EQ && ! invert) || (code == NE && invert))
10441 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10442 ops_unsignedp, OPTAB_LIB_WIDEN);
10444 /* Put the AND last so it can combine with more things. */
10445 if (bitnum != TYPE_PRECISION (type) - 1)
10446 op0 = expand_and (op0, const1_rtx, subtarget);
10448 return op0;
10451 /* Now see if we are likely to be able to do this. Return if not. */
10452 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10453 return 0;
10455 icode = setcc_gen_code[(int) code];
10456 if (icode == CODE_FOR_nothing
10457 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10459 /* We can only do this if it is one of the special cases that
10460 can be handled without an scc insn. */
10461 if ((code == LT && integer_zerop (arg1))
10462 || (! only_cheap && code == GE && integer_zerop (arg1)))
10464 else if (BRANCH_COST >= 0
10465 && ! only_cheap && (code == NE || code == EQ)
10466 && TREE_CODE (type) != REAL_TYPE
10467 && ((abs_optab->handlers[(int) operand_mode].insn_code
10468 != CODE_FOR_nothing)
10469 || (ffs_optab->handlers[(int) operand_mode].insn_code
10470 != CODE_FOR_nothing)))
10472 else
10473 return 0;
10476 if (! get_subtarget (target)
10477 || GET_MODE (subtarget) != operand_mode
10478 || ! safe_from_p (subtarget, arg1, 1))
10479 subtarget = 0;
10481 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10482 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10484 if (target == 0)
10485 target = gen_reg_rtx (mode);
10487 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10488 because, if emit_store_flag does anything, it will succeed and
10489 OP0 and OP1 will not be used subsequently. */
10491 result = emit_store_flag (target, code,
10492 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10493 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10494 operand_mode, unsignedp, 1);
10496 if (result)
10498 if (invert)
10499 result = expand_binop (mode, xor_optab, result, const1_rtx,
10500 result, 0, OPTAB_LIB_WIDEN);
10501 return result;
10504 /* If this failed, we have to do this with set/compare/jump/set code. */
10505 if (GET_CODE (target) != REG
10506 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10507 target = gen_reg_rtx (GET_MODE (target));
10509 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10510 result = compare_from_rtx (op0, op1, code, unsignedp,
10511 operand_mode, NULL_RTX, 0);
10512 if (GET_CODE (result) == CONST_INT)
10513 return (((result == const0_rtx && ! invert)
10514 || (result != const0_rtx && invert))
10515 ? const0_rtx : const1_rtx);
10517 label = gen_label_rtx ();
10518 if (bcc_gen_fctn[(int) code] == 0)
10519 abort ();
10521 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10522 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10523 emit_label (label);
10525 return target;
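/* A minimal illustrative sketch, not compiler code: the shift-and-mask
   rewrite of a single-bit test performed early in do_store_flag, written
   at the source level.  The bit number 5 is an arbitrary example; the
   function names are hypothetical.  */

static int
sketch_bit_ne (unsigned int x)
{
  return (x >> 5) & 1;          /* (x & (1 << 5)) != 0, without an scc insn */
}

static int
sketch_bit_eq (unsigned int x)
{
  return ((x >> 5) & 1) ^ 1;    /* (x & (1 << 5)) == 0: same shift, then XOR with 1 */
}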
10528 /* Generate a tablejump instruction (used for switch statements). */
10530 #ifdef HAVE_tablejump
10532 /* INDEX is the value being switched on, with the lowest value
10533 in the table already subtracted.
10534 MODE is its expected mode (needed if INDEX is constant).
10535 RANGE is the length of the jump table.
10536 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10538 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10539 index value is out of range. */
10541 void
10542 do_tablejump (index, mode, range, table_label, default_label)
10543 rtx index, range, table_label, default_label;
10544 enum machine_mode mode;
10546 register rtx temp, vector;
10548 /* Do an unsigned comparison (in the proper mode) between the index
10549 expression and the value which represents the length of the range.
10550 Since we just finished subtracting the lower bound of the range
10551 from the index expression, this comparison allows us to simultaneously
10552 check that the original index expression value is both greater than
10553 or equal to the minimum value of the range and less than or equal to
10554 the maximum value of the range. */
10556 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10557 0, default_label);
10559 /* If index is in range, it must fit in Pmode.
10560 Convert to Pmode so we can index with it. */
10561 if (mode != Pmode)
10562 index = convert_to_mode (Pmode, index, 1);
10564 /* Don't let a MEM slip through, because then the INDEX that comes
10565 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10566 and break_out_memory_refs will go to work on it and mess it up. */
10567 #ifdef PIC_CASE_VECTOR_ADDRESS
10568 if (flag_pic && GET_CODE (index) != REG)
10569 index = copy_to_mode_reg (Pmode, index);
10570 #endif
10572 /* If flag_force_addr were to affect this address
10573 it could interfere with the tricky assumptions made
10574 about addresses that contain label-refs,
10575 which may be valid only very near the tablejump itself. */
10576 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10577 GET_MODE_SIZE, because this indicates how large insns are. The other
10578 uses should all be Pmode, because they are addresses. This code
10579 could fail if addresses and insns are not the same size. */
10580 index = gen_rtx_PLUS (Pmode,
10581 gen_rtx_MULT (Pmode, index,
10582 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10583 gen_rtx_LABEL_REF (Pmode, table_label));
10584 #ifdef PIC_CASE_VECTOR_ADDRESS
10585 if (flag_pic)
10586 index = PIC_CASE_VECTOR_ADDRESS (index);
10587 else
10588 #endif
10589 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10590 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10591 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10592 RTX_UNCHANGING_P (vector) = 1;
10593 convert_move (temp, vector, 0);
10595 emit_jump_insn (gen_tablejump (temp, table_label));
10597 /* If we are generating PIC code or if the table is PC-relative, the
10598 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10599 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10600 emit_barrier ();
10603 #endif /* HAVE_tablejump */
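/* A minimal illustrative sketch, not compiler code: the single unsigned
   comparison that do_tablejump relies on to range-check the switch index.
   Once the lower bound has been subtracted, one GTU test covers both ends
   of the range.  Names are hypothetical.  */

static int
sketch_index_in_range (long index, long low, long high)
{
  unsigned long biased = (unsigned long) index - (unsigned long) low;
  unsigned long range = (unsigned long) high - (unsigned long) low;

  /* BIASED > RANGE exactly when INDEX < LOW (the subtraction wraps to a
     huge value) or INDEX > HIGH, so a single unsigned branch to
     default_label suffices.  */
  return biased <= range;
}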