1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
51 #ifdef PUSH_ROUNDING
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
57 #endif
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
 74    codes that safe_from_p needs to know about.  Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
 89 /* Don't check memory usage, since code is being emitted to check memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
176 /* Record for each mode whether we can move a register directly to or
177 from an object of that mode in memory. If we can't, we won't try
178 to use that mode directly when accessing a field of that mode. */
180 static char direct_load[NUM_MACHINE_MODES];
181 static char direct_store[NUM_MACHINE_MODES];
183 /* If a memory-to-memory move would take MOVE_RATIO or more simple
184 move-instruction sequences, we will do a movstr or libcall instead. */
186 #ifndef MOVE_RATIO
187 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
188 #define MOVE_RATIO 2
189 #else
190 /* If we are optimizing for space (-Os), cut down the default move ratio. */
191 #define MOVE_RATIO (optimize_size ? 3 : 15)
192 #endif
193 #endif
195 /* This macro is used to determine whether move_by_pieces should be called
196 to perform a structure copy. */
197 #ifndef MOVE_BY_PIECES_P
198 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
199 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
200 #endif
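/* Illustrative example (editorial addition, not original expr.c code):
   assuming a 32-bit target with word-aligned operands and the default
   MOVE_RATIO of 15, copying 16 bytes takes 16/4 = 4 SImode moves, so
   move_by_pieces_ninsns returns 4, MOVE_BY_PIECES_P is true, and the copy
   is expanded inline rather than through a movstr pattern or a libcall.  */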
202 /* This array records the insn_code of insns to perform block moves. */
203 enum insn_code movstr_optab[NUM_MACHINE_MODES];
205 /* This array records the insn_code of insns to perform block clears. */
206 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
208 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
210 #ifndef SLOW_UNALIGNED_ACCESS
211 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
212 #endif
214 /* This is run once per compilation to set up which modes can be used
215 directly in memory and to initialize the block move optab. */
217 void
218 init_expr_once ()
220 rtx insn, pat;
221 enum machine_mode mode;
222 int num_clobbers;
223 rtx mem, mem1;
225 start_sequence ();
227 /* Try indexing by frame ptr and try by stack ptr.
228 It is known that on the Convex the stack ptr isn't a valid index.
229 With luck, one or the other is valid on any machine. */
230 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
231 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
233 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
234 pat = PATTERN (insn);
236 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
237 mode = (enum machine_mode) ((int) mode + 1))
239 int regno;
240 rtx reg;
242 direct_load[(int) mode] = direct_store[(int) mode] = 0;
243 PUT_MODE (mem, mode);
244 PUT_MODE (mem1, mode);
246 /* See if there is some register that can be used in this mode and
247 directly loaded or stored from memory. */
249 if (mode != VOIDmode && mode != BLKmode)
250 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
251 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
252 regno++)
254 if (! HARD_REGNO_MODE_OK (regno, mode))
255 continue;
257 reg = gen_rtx_REG (mode, regno);
259 SET_SRC (pat) = mem;
260 SET_DEST (pat) = reg;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_load[(int) mode] = 1;
264 SET_SRC (pat) = mem1;
265 SET_DEST (pat) = reg;
266 if (recog (pat, insn, &num_clobbers) >= 0)
267 direct_load[(int) mode] = 1;
269 SET_SRC (pat) = reg;
270 SET_DEST (pat) = mem;
271 if (recog (pat, insn, &num_clobbers) >= 0)
272 direct_store[(int) mode] = 1;
274 SET_SRC (pat) = reg;
275 SET_DEST (pat) = mem1;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_store[(int) mode] = 1;
281 end_sequence ();
284 /* This is run at the start of compiling a function. */
286 void
287 init_expr ()
289 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
291 pending_chain = 0;
292 pending_stack_adjust = 0;
293 stack_pointer_delta = 0;
294 inhibit_defer_pop = 0;
295 saveregs_value = 0;
296 apply_args_value = 0;
297 forced_labels = 0;
300 void
301 mark_expr_status (p)
302 struct expr_status *p;
304 if (p == NULL)
305 return;
307 ggc_mark_rtx (p->x_saveregs_value);
308 ggc_mark_rtx (p->x_apply_args_value);
309 ggc_mark_rtx (p->x_forced_labels);
312 void
313 free_expr_status (f)
314 struct function *f;
316 free (f->expr);
317 f->expr = NULL;
320 /* Small sanity check that the queue is empty at the end of a function. */
322 void
323 finish_expr_for_function ()
325 if (pending_chain)
326 abort ();
329 /* Manage the queue of increment instructions to be output
330 for POSTINCREMENT_EXPR expressions, etc. */
332 /* Queue up to increment (or change) VAR later. BODY says how:
333 BODY should be the same thing you would pass to emit_insn
334 to increment right away. It will go to emit_insn later on.
336 The value is a QUEUED expression to be used in place of VAR
337 where you want to guarantee the pre-incrementation value of VAR. */
339 static rtx
340 enqueue_insn (var, body)
341 rtx var, body;
343 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
344 body, pending_chain);
345 return pending_chain;
348 /* Use protect_from_queue to convert a QUEUED expression
349 into something that you can put immediately into an instruction.
350 If the queued incrementation has not happened yet,
351 protect_from_queue returns the variable itself.
352 If the incrementation has happened, protect_from_queue returns a temp
353 that contains a copy of the old value of the variable.
355 Any time an rtx which might possibly be a QUEUED is to be put
356 into an instruction, it must be passed through protect_from_queue first.
357 QUEUED expressions are not meaningful in instructions.
359 Do not pass a value through protect_from_queue and then hold
360 on to it for a while before putting it in an instruction!
361 If the queue is flushed in between, incorrect code will result. */
364 protect_from_queue (x, modify)
365 register rtx x;
366 int modify;
368 register RTX_CODE code = GET_CODE (x);
370 #if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374 #endif
376 if (code != QUEUED)
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
386 register rtx y = XEXP (x, 0);
387 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
389 MEM_COPY_ATTRIBUTES (new, x);
391 if (QUEUED_INSN (y))
393 register rtx temp = gen_reg_rtx (GET_MODE (new));
394 emit_insn_before (gen_move_insn (temp, new),
395 QUEUED_INSN (y));
396 return temp;
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
400 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
401 return new;
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
414 else if (code == PLUS || code == MULT)
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
425 return x;
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
430 if (QUEUED_INSN (x) == 0)
431 return copy_to_reg (QUEUED_VAR (x));
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
444 /* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
450 queued_subexp_p (x)
451 rtx x;
453 register enum rtx_code code = GET_CODE (x);
454 switch (code)
456 case QUEUED:
457 return 1;
458 case MEM:
459 return queued_subexp_p (XEXP (x, 0));
460 case MULT:
461 case PLUS:
462 case MINUS:
463 return (queued_subexp_p (XEXP (x, 0))
464 || queued_subexp_p (XEXP (x, 1)));
465 default:
466 return 0;
470 /* Perform all the pending incrementations. */
472 void
473 emit_queue ()
475 register rtx p;
476 while ((p = pending_chain))
478 rtx body = QUEUED_BODY (p);
480 if (GET_CODE (body) == SEQUENCE)
482 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
483 emit_insn (QUEUED_BODY (p));
485 else
486 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
487 pending_chain = QUEUED_NEXT (p);
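/* Illustrative sketch (editorial addition, not original expr.c code) of
   the intended calling pattern for the queue machinery: any rtx that may
   be a QUEUED is passed through protect_from_queue just before it is put
   into an insn, and emit_queue is called once the pending side effects may
   safely happen:

       op0 = protect_from_queue (op0, 0);
       emit_move_insn (target, op0);
       emit_queue ();

   Here `op0' and `target' are placeholders for whatever rtx the caller has
   in hand, not names defined in this file.  */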
491 /* Copy data from FROM to TO, where the machine modes are not the same.
492 Both modes may be integer, or both may be floating.
493 UNSIGNEDP should be nonzero if FROM is an unsigned type.
494 This causes zero-extension instead of sign-extension. */
496 void
497 convert_move (to, from, unsignedp)
498 register rtx to, from;
499 int unsignedp;
501 enum machine_mode to_mode = GET_MODE (to);
502 enum machine_mode from_mode = GET_MODE (from);
503 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
504 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
505 enum insn_code code;
506 rtx libcall;
508 /* rtx code for making an equivalent value. */
509 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
511 to = protect_from_queue (to, 1);
512 from = protect_from_queue (from, 0);
514 if (to_real != from_real)
515 abort ();
517 /* If FROM is a SUBREG that indicates that we have already done at least
518 the required extension, strip it. We don't handle such SUBREGs as
519 TO here. */
521 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
522 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
523 >= GET_MODE_SIZE (to_mode))
524 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
525 from = gen_lowpart (to_mode, from), from_mode = to_mode;
527 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
528 abort ();
530 if (to_mode == from_mode
531 || (from_mode == VOIDmode && CONSTANT_P (from)))
533 emit_move_insn (to, from);
534 return;
537 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
539 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
540 abort ();
542 if (VECTOR_MODE_P (to_mode))
543 from = gen_rtx_SUBREG (to_mode, from, 0);
544 else
545 to = gen_rtx_SUBREG (from_mode, to, 0);
547 emit_move_insn (to, from);
548 return;
551 if (to_real != from_real)
552 abort ();
554 if (to_real)
556 rtx value, insns;
558 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
560 /* Try converting directly if the insn is supported. */
561 if ((code = can_extend_p (to_mode, from_mode, 0))
562 != CODE_FOR_nothing)
564 emit_unop_insn (code, to, from, UNKNOWN);
565 return;
569 #ifdef HAVE_trunchfqf2
570 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
572 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
573 return;
575 #endif
576 #ifdef HAVE_trunctqfqf2
577 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
579 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
580 return;
582 #endif
583 #ifdef HAVE_truncsfqf2
584 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
586 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
587 return;
589 #endif
590 #ifdef HAVE_truncdfqf2
591 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
593 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
594 return;
596 #endif
597 #ifdef HAVE_truncxfqf2
598 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
600 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
601 return;
603 #endif
604 #ifdef HAVE_trunctfqf2
605 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
607 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
608 return;
610 #endif
612 #ifdef HAVE_trunctqfhf2
613 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
615 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
616 return;
618 #endif
619 #ifdef HAVE_truncsfhf2
620 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
622 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
623 return;
625 #endif
626 #ifdef HAVE_truncdfhf2
627 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
629 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
630 return;
632 #endif
633 #ifdef HAVE_truncxfhf2
634 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
637 return;
639 #endif
640 #ifdef HAVE_trunctfhf2
641 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
644 return;
646 #endif
648 #ifdef HAVE_truncsftqf2
649 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
651 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncdftqf2
656 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
658 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_truncxftqf2
663 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
665 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
666 return;
668 #endif
669 #ifdef HAVE_trunctftqf2
670 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
672 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
673 return;
675 #endif
677 #ifdef HAVE_truncdfsf2
678 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
680 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncxfsf2
685 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
687 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_trunctfsf2
692 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
694 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_truncxfdf2
699 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
701 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
702 return;
704 #endif
705 #ifdef HAVE_trunctfdf2
706 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
708 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
709 return;
711 #endif
713 libcall = (rtx) 0;
714 switch (from_mode)
716 case SFmode:
717 switch (to_mode)
719 case DFmode:
720 libcall = extendsfdf2_libfunc;
721 break;
723 case XFmode:
724 libcall = extendsfxf2_libfunc;
725 break;
727 case TFmode:
728 libcall = extendsftf2_libfunc;
729 break;
731 default:
732 break;
734 break;
736 case DFmode:
737 switch (to_mode)
739 case SFmode:
740 libcall = truncdfsf2_libfunc;
741 break;
743 case XFmode:
744 libcall = extenddfxf2_libfunc;
745 break;
747 case TFmode:
748 libcall = extenddftf2_libfunc;
749 break;
751 default:
752 break;
754 break;
756 case XFmode:
757 switch (to_mode)
759 case SFmode:
760 libcall = truncxfsf2_libfunc;
761 break;
763 case DFmode:
764 libcall = truncxfdf2_libfunc;
765 break;
767 default:
768 break;
770 break;
772 case TFmode:
773 switch (to_mode)
775 case SFmode:
776 libcall = trunctfsf2_libfunc;
777 break;
779 case DFmode:
780 libcall = trunctfdf2_libfunc;
781 break;
783 default:
784 break;
786 break;
788 default:
789 break;
792 if (libcall == (rtx) 0)
793 /* This conversion is not implemented yet. */
794 abort ();
796 start_sequence ();
797 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
798 1, from, from_mode);
799 insns = get_insns ();
800 end_sequence ();
801 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
802 from));
803 return;
806 /* Now both modes are integers. */
808 /* Handle expanding beyond a word. */
809 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
810 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
812 rtx insns;
813 rtx lowpart;
814 rtx fill_value;
815 rtx lowfrom;
816 int i;
817 enum machine_mode lowpart_mode;
818 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
820 /* Try converting directly if the insn is supported. */
821 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
822 != CODE_FOR_nothing)
824 /* If FROM is a SUBREG, put it into a register. Do this
825 so that we always generate the same set of insns for
826 better cse'ing; if an intermediate assignment occurred,
827 we won't be doing the operation directly on the SUBREG. */
828 if (optimize > 0 && GET_CODE (from) == SUBREG)
829 from = force_reg (from_mode, from);
830 emit_unop_insn (code, to, from, equiv_code);
831 return;
833 /* Next, try converting via full word. */
834 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
835 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
836 != CODE_FOR_nothing))
838 if (GET_CODE (to) == REG)
839 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
840 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
841 emit_unop_insn (code, to,
842 gen_lowpart (word_mode, to), equiv_code);
843 return;
846 /* No special multiword conversion insn; do it by hand. */
847 start_sequence ();
849 /* Since we will turn this into a no conflict block, we must ensure
850 that the source does not overlap the target. */
852 if (reg_overlap_mentioned_p (to, from))
853 from = force_reg (from_mode, from);
855 /* Get a copy of FROM widened to a word, if necessary. */
856 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
857 lowpart_mode = word_mode;
858 else
859 lowpart_mode = from_mode;
861 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
863 lowpart = gen_lowpart (lowpart_mode, to);
864 emit_move_insn (lowpart, lowfrom);
866 /* Compute the value to put in each remaining word. */
867 if (unsignedp)
868 fill_value = const0_rtx;
869 else
871 #ifdef HAVE_slt
872 if (HAVE_slt
873 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
874 && STORE_FLAG_VALUE == -1)
876 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
877 lowpart_mode, 0, 0);
878 fill_value = gen_reg_rtx (word_mode);
879 emit_insn (gen_slt (fill_value));
881 else
882 #endif
884 fill_value
885 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
886 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
887 NULL_RTX, 0);
888 fill_value = convert_to_mode (word_mode, fill_value, 1);
892 /* Fill the remaining words. */
893 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
895 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
896 rtx subword = operand_subword (to, index, 1, to_mode);
898 if (subword == 0)
899 abort ();
901 if (fill_value != subword)
902 emit_move_insn (subword, fill_value);
905 insns = get_insns ();
906 end_sequence ();
908 emit_no_conflict_block (insns, to, from, NULL_RTX,
909 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
910 return;
913 /* Truncating multi-word to a word or less. */
914 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
915 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
917 if (!((GET_CODE (from) == MEM
918 && ! MEM_VOLATILE_P (from)
919 && direct_load[(int) to_mode]
920 && ! mode_dependent_address_p (XEXP (from, 0)))
921 || GET_CODE (from) == REG
922 || GET_CODE (from) == SUBREG))
923 from = force_reg (from_mode, from);
924 convert_move (to, gen_lowpart (word_mode, from), 0);
925 return;
928 /* Handle pointer conversion. */ /* SPEE 900220. */
929 if (to_mode == PQImode)
931 if (from_mode != QImode)
932 from = convert_to_mode (QImode, from, unsignedp);
934 #ifdef HAVE_truncqipqi2
935 if (HAVE_truncqipqi2)
937 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
938 return;
940 #endif /* HAVE_truncqipqi2 */
941 abort ();
944 if (from_mode == PQImode)
946 if (to_mode != QImode)
948 from = convert_to_mode (QImode, from, unsignedp);
949 from_mode = QImode;
951 else
953 #ifdef HAVE_extendpqiqi2
954 if (HAVE_extendpqiqi2)
956 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
957 return;
959 #endif /* HAVE_extendpqiqi2 */
960 abort ();
964 if (to_mode == PSImode)
966 if (from_mode != SImode)
967 from = convert_to_mode (SImode, from, unsignedp);
969 #ifdef HAVE_truncsipsi2
970 if (HAVE_truncsipsi2)
972 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
973 return;
975 #endif /* HAVE_truncsipsi2 */
976 abort ();
979 if (from_mode == PSImode)
981 if (to_mode != SImode)
983 from = convert_to_mode (SImode, from, unsignedp);
984 from_mode = SImode;
986 else
988 #ifdef HAVE_extendpsisi2
989 if (! unsignedp && HAVE_extendpsisi2)
991 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
992 return;
994 #endif /* HAVE_extendpsisi2 */
995 #ifdef HAVE_zero_extendpsisi2
996 if (unsignedp && HAVE_zero_extendpsisi2)
998 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
999 return;
1001 #endif /* HAVE_zero_extendpsisi2 */
1002 abort ();
1006 if (to_mode == PDImode)
1008 if (from_mode != DImode)
1009 from = convert_to_mode (DImode, from, unsignedp);
1011 #ifdef HAVE_truncdipdi2
1012 if (HAVE_truncdipdi2)
1014 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_truncdipdi2 */
1018 abort ();
1021 if (from_mode == PDImode)
1023 if (to_mode != DImode)
1025 from = convert_to_mode (DImode, from, unsignedp);
1026 from_mode = DImode;
1028 else
1030 #ifdef HAVE_extendpdidi2
1031 if (HAVE_extendpdidi2)
1033 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1034 return;
1036 #endif /* HAVE_extendpdidi2 */
1037 abort ();
1041 /* Now follow all the conversions between integers
1042 no more than a word long. */
1044 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1045 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1046 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1047 GET_MODE_BITSIZE (from_mode)))
1049 if (!((GET_CODE (from) == MEM
1050 && ! MEM_VOLATILE_P (from)
1051 && direct_load[(int) to_mode]
1052 && ! mode_dependent_address_p (XEXP (from, 0)))
1053 || GET_CODE (from) == REG
1054 || GET_CODE (from) == SUBREG))
1055 from = force_reg (from_mode, from);
1056 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1057 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1058 from = copy_to_reg (from);
1059 emit_move_insn (to, gen_lowpart (to_mode, from));
1060 return;
1063 /* Handle extension. */
1064 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1066 /* Convert directly if that works. */
1067 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1068 != CODE_FOR_nothing)
1070 emit_unop_insn (code, to, from, equiv_code);
1071 return;
1073 else
1075 enum machine_mode intermediate;
1076 rtx tmp;
1077 tree shift_amount;
1079 /* Search for a mode to convert via. */
1080 for (intermediate = from_mode; intermediate != VOIDmode;
1081 intermediate = GET_MODE_WIDER_MODE (intermediate))
1082 if (((can_extend_p (to_mode, intermediate, unsignedp)
1083 != CODE_FOR_nothing)
1084 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1085 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1086 GET_MODE_BITSIZE (intermediate))))
1087 && (can_extend_p (intermediate, from_mode, unsignedp)
1088 != CODE_FOR_nothing))
1090 convert_move (to, convert_to_mode (intermediate, from,
1091 unsignedp), unsignedp);
1092 return;
1095 /* No suitable intermediate mode.
1096 Generate what we need with shifts. */
1097 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1098 - GET_MODE_BITSIZE (from_mode), 0);
1099 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1100 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1101 to, unsignedp);
1102 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1103 to, unsignedp);
1104 if (tmp != to)
1105 emit_move_insn (to, tmp);
1106 return;
1110 /* Support special truncate insns for certain modes. */
1112 if (from_mode == DImode && to_mode == SImode)
1114 #ifdef HAVE_truncdisi2
1115 if (HAVE_truncdisi2)
1117 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1118 return;
1120 #endif
1121 convert_move (to, force_reg (from_mode, from), unsignedp);
1122 return;
1125 if (from_mode == DImode && to_mode == HImode)
1127 #ifdef HAVE_truncdihi2
1128 if (HAVE_truncdihi2)
1130 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1131 return;
1133 #endif
1134 convert_move (to, force_reg (from_mode, from), unsignedp);
1135 return;
1138 if (from_mode == DImode && to_mode == QImode)
1140 #ifdef HAVE_truncdiqi2
1141 if (HAVE_truncdiqi2)
1143 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1144 return;
1146 #endif
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 return;
1151 if (from_mode == SImode && to_mode == HImode)
1153 #ifdef HAVE_truncsihi2
1154 if (HAVE_truncsihi2)
1156 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1157 return;
1159 #endif
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 return;
1164 if (from_mode == SImode && to_mode == QImode)
1166 #ifdef HAVE_truncsiqi2
1167 if (HAVE_truncsiqi2)
1169 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1170 return;
1172 #endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1177 if (from_mode == HImode && to_mode == QImode)
1179 #ifdef HAVE_trunchiqi2
1180 if (HAVE_trunchiqi2)
1182 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1183 return;
1185 #endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1190 if (from_mode == TImode && to_mode == DImode)
1192 #ifdef HAVE_trunctidi2
1193 if (HAVE_trunctidi2)
1195 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1196 return;
1198 #endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1203 if (from_mode == TImode && to_mode == SImode)
1205 #ifdef HAVE_trunctisi2
1206 if (HAVE_trunctisi2)
1208 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1209 return;
1211 #endif
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 return;
1216 if (from_mode == TImode && to_mode == HImode)
1218 #ifdef HAVE_trunctihi2
1219 if (HAVE_trunctihi2)
1221 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1222 return;
1224 #endif
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 return;
1229 if (from_mode == TImode && to_mode == QImode)
1231 #ifdef HAVE_trunctiqi2
1232 if (HAVE_trunctiqi2)
1234 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1235 return;
1237 #endif
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 return;
1242 /* Handle truncation of volatile memrefs, and so on;
1243 the things that couldn't be truncated directly,
1244 and for which there was no special instruction. */
1245 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1247 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1248 emit_move_insn (to, temp);
1249 return;
1252 /* Mode combination is not recognized. */
1253 abort ();
1256 /* Return an rtx for a value that would result
1257 from converting X to mode MODE.
1258 Both X and MODE may be floating, or both integer.
1259 UNSIGNEDP is nonzero if X is an unsigned value.
1260 This can be done by referring to a part of X in place
1261 or by copying to a new temporary with conversion.
1263 This function *must not* call protect_from_queue
1264 except when putting X into an insn (in which case convert_move does it). */
1267 convert_to_mode (mode, x, unsignedp)
1268 enum machine_mode mode;
1269 rtx x;
1270 int unsignedp;
1272 return convert_modes (mode, VOIDmode, x, unsignedp);
1275 /* Return an rtx for a value that would result
1276 from converting X from mode OLDMODE to mode MODE.
1277 Both modes may be floating, or both integer.
1278 UNSIGNEDP is nonzero if X is an unsigned value.
1280 This can be done by referring to a part of X in place
1281 or by copying to a new temporary with conversion.
1283 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1285 This function *must not* call protect_from_queue
1286 except when putting X into an insn (in which case convert_move does it). */
1289 convert_modes (mode, oldmode, x, unsignedp)
1290 enum machine_mode mode, oldmode;
1291 rtx x;
1292 int unsignedp;
1294 register rtx temp;
1296 /* If FROM is a SUBREG that indicates that we have already done at least
1297 the required extension, strip it. */
1299 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1300 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1301 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1302 x = gen_lowpart (mode, x);
1304 if (GET_MODE (x) != VOIDmode)
1305 oldmode = GET_MODE (x);
1307 if (mode == oldmode)
1308 return x;
1310 /* There is one case that we must handle specially: If we are converting
1311 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1312 we are to interpret the constant as unsigned, gen_lowpart will do
1313    the wrong thing if the constant appears negative.  What we want to do is
1314 make the high-order word of the constant zero, not all ones. */
1316 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1317 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1318 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1320 HOST_WIDE_INT val = INTVAL (x);
1322 if (oldmode != VOIDmode
1323 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1325 int width = GET_MODE_BITSIZE (oldmode);
1327 /* We need to zero extend VAL. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1331 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1334 /* We can do this with a gen_lowpart if both desired and current modes
1335 are integer, and this is either a constant integer, a register, or a
1336 non-volatile MEM. Except for the constant case where MODE is no
1337 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1339 if ((GET_CODE (x) == CONST_INT
1340 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1341 || (GET_MODE_CLASS (mode) == MODE_INT
1342 && GET_MODE_CLASS (oldmode) == MODE_INT
1343 && (GET_CODE (x) == CONST_DOUBLE
1344 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1345 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1346 && direct_load[(int) mode])
1347 || (GET_CODE (x) == REG
1348 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1349 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1351       /* ??? If we don't know OLDMODE, we have to assume here that
1352 X does not need sign- or zero-extension. This may not be
1353 the case, but it's the best we can do. */
1354 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1355 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1357 HOST_WIDE_INT val = INTVAL (x);
1358 int width = GET_MODE_BITSIZE (oldmode);
1360 /* We must sign or zero-extend in this case. Start by
1361 zero-extending, then sign extend if we need to. */
1362 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1363 if (! unsignedp
1364 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1365 val |= (HOST_WIDE_INT) (-1) << width;
1367 return GEN_INT (val);
1370 return gen_lowpart (mode, x);
1373 temp = gen_reg_rtx (mode);
1374 convert_move (temp, x, unsignedp);
1375 return temp;
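/* Illustrative example (editorial addition, not original expr.c code):
   a caller holding a QImode value in `byte_val' (a placeholder name) can
   zero-extend it to SImode with

       rtx wide = convert_to_mode (SImode, byte_val, 1);

   and when the old mode is given explicitly the CONST_INT handling above
   applies, so

       convert_modes (SImode, QImode, GEN_INT (-1), 1)

   yields (const_int 255) rather than a sign-extended constant.  */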
1378 /* This macro is used to determine the largest unit size that
1379    move_by_pieces can use.  */
1381 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1382 move efficiently, as opposed to MOVE_MAX which is the maximum
1383 number of bytes we can move with a single instruction. */
1385 #ifndef MOVE_MAX_PIECES
1386 #define MOVE_MAX_PIECES MOVE_MAX
1387 #endif
1389 /* Generate several move instructions to copy LEN bytes
1390 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1391 The caller must pass FROM and TO
1392 through protect_from_queue before calling.
1393 ALIGN is maximum alignment we can assume. */
1395 void
1396 move_by_pieces (to, from, len, align)
1397 rtx to, from;
1398 unsigned HOST_WIDE_INT len;
1399 unsigned int align;
1401 struct move_by_pieces data;
1402 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1403 unsigned int max_size = MOVE_MAX_PIECES + 1;
1404 enum machine_mode mode = VOIDmode, tmode;
1405 enum insn_code icode;
1407 data.offset = 0;
1408 data.to_addr = to_addr;
1409 data.from_addr = from_addr;
1410 data.to = to;
1411 data.from = from;
1412 data.autinc_to
1413 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1414 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1415 data.autinc_from
1416 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1417 || GET_CODE (from_addr) == POST_INC
1418 || GET_CODE (from_addr) == POST_DEC);
1420 data.explicit_inc_from = 0;
1421 data.explicit_inc_to = 0;
1422 data.reverse
1423 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 if (data.reverse) data.offset = len;
1425 data.len = len;
1427 /* If copying requires more than two move insns,
1428 copy addresses to registers (to make displacements shorter)
1429 and use post-increment if available. */
1430 if (!(data.autinc_from && data.autinc_to)
1431 && move_by_pieces_ninsns (len, align) > 2)
1433 /* Find the mode of the largest move... */
1434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1435 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1436 if (GET_MODE_SIZE (tmode) < max_size)
1437 mode = tmode;
1439 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1441 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1442 data.autinc_from = 1;
1443 data.explicit_inc_from = -1;
1445 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (from_addr);
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = 1;
1451 if (!data.autinc_from && CONSTANT_P (from_addr))
1452 data.from_addr = copy_addr_to_reg (from_addr);
1453 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1455 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1456 data.autinc_to = 1;
1457 data.explicit_inc_to = -1;
1459 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (to_addr);
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = 1;
1465 if (!data.autinc_to && CONSTANT_P (to_addr))
1466 data.to_addr = copy_addr_to_reg (to_addr);
1469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1471 align = MOVE_MAX * BITS_PER_UNIT;
1473 /* First move what we can in the largest integer mode, then go to
1474 successively smaller modes. */
1476 while (max_size > 1)
1478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1480 if (GET_MODE_SIZE (tmode) < max_size)
1481 mode = tmode;
1483 if (mode == VOIDmode)
1484 break;
1486 icode = mov_optab->handlers[(int) mode].insn_code;
1487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1488 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1490 max_size = GET_MODE_SIZE (mode);
1493 /* The code above should have handled everything. */
1494 if (data.len > 0)
1495 abort ();
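/* Illustrative trace (editorial addition, not original expr.c code): on a
   32-bit target with MOVE_MAX_PIECES == 4 and word-aligned operands, a
   7-byte copy is expanded by the loop above as one SImode move, then one
   HImode move, then one QImode move, with data.offset advancing by 4, 2
   and 1 bytes (or stepping downward when data.reverse is set).  */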
1498 /* Return number of insns required to move L bytes by pieces.
1499 ALIGN (in bits) is maximum alignment we can assume. */
1501 static unsigned HOST_WIDE_INT
1502 move_by_pieces_ninsns (l, align)
1503 unsigned HOST_WIDE_INT l;
1504 unsigned int align;
1506 unsigned HOST_WIDE_INT n_insns = 0;
1507 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1509 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1510 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1511 align = MOVE_MAX * BITS_PER_UNIT;
1513 while (max_size > 1)
1515 enum machine_mode mode = VOIDmode, tmode;
1516 enum insn_code icode;
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1521 mode = tmode;
1523 if (mode == VOIDmode)
1524 break;
1526 icode = mov_optab->handlers[(int) mode].insn_code;
1527 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1528 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1530 max_size = GET_MODE_SIZE (mode);
1533 if (l)
1534 abort ();
1535 return n_insns;
1538 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1539 with move instructions for mode MODE. GENFUN is the gen_... function
1540 to make a move insn for that mode. DATA has all the other info. */
1542 static void
1543 move_by_pieces_1 (genfun, mode, data)
1544 rtx (*genfun) PARAMS ((rtx, ...));
1545 enum machine_mode mode;
1546 struct move_by_pieces *data;
1548 unsigned int size = GET_MODE_SIZE (mode);
1549 rtx to1, from1;
1551 while (data->len >= size)
1553 if (data->reverse)
1554 data->offset -= size;
1556 if (data->autinc_to)
1558 to1 = gen_rtx_MEM (mode, data->to_addr);
1559 MEM_COPY_ATTRIBUTES (to1, data->to);
1561 else
1562 to1 = change_address (data->to, mode,
1563 plus_constant (data->to_addr, data->offset));
1565 if (data->autinc_from)
1567 from1 = gen_rtx_MEM (mode, data->from_addr);
1568 MEM_COPY_ATTRIBUTES (from1, data->from);
1570 else
1571 from1 = change_address (data->from, mode,
1572 plus_constant (data->from_addr, data->offset));
1574 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1575 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1576 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1577 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1579 emit_insn ((*genfun) (to1, from1));
1581 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1582 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1583 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1584 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1586 if (! data->reverse)
1587 data->offset += size;
1589 data->len -= size;
1593 /* Emit code to move a block Y to a block X.
1594 This may be done with string-move instructions,
1595 with multiple scalar move instructions, or with a library call.
1597 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1598 with mode BLKmode.
1599 SIZE is an rtx that says how long they are.
1600 ALIGN is the maximum alignment we can assume they have.
1602 Return the address of the new block, if memcpy is called and returns it,
1603 0 otherwise. */
1606 emit_block_move (x, y, size, align)
1607 rtx x, y;
1608 rtx size;
1609 unsigned int align;
1611 rtx retval = 0;
1612 #ifdef TARGET_MEM_FUNCTIONS
1613 static tree fn;
1614 tree call_expr, arg_list;
1615 #endif
1617 if (GET_MODE (x) != BLKmode)
1618 abort ();
1620 if (GET_MODE (y) != BLKmode)
1621 abort ();
1623 x = protect_from_queue (x, 1);
1624 y = protect_from_queue (y, 0);
1625 size = protect_from_queue (size, 0);
1627 if (GET_CODE (x) != MEM)
1628 abort ();
1629 if (GET_CODE (y) != MEM)
1630 abort ();
1631 if (size == 0)
1632 abort ();
1634 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1635 move_by_pieces (x, y, INTVAL (size), align);
1636 else
1638 /* Try the most limited insn first, because there's no point
1639 including more than one in the machine description unless
1640 the more limited one has some advantage. */
1642 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1643 enum machine_mode mode;
1645 /* Since this is a move insn, we don't care about volatility. */
1646 volatile_ok = 1;
1648 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1649 mode = GET_MODE_WIDER_MODE (mode))
1651 enum insn_code code = movstr_optab[(int) mode];
1652 insn_operand_predicate_fn pred;
1654 if (code != CODE_FOR_nothing
1655          /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1656 here because if SIZE is less than the mode mask, as it is
1657 returned by the macro, it will definitely be less than the
1658 actual mode mask. */
1659 && ((GET_CODE (size) == CONST_INT
1660 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1661 <= (GET_MODE_MASK (mode) >> 1)))
1662 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1663 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1664 || (*pred) (x, BLKmode))
1665 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1666 || (*pred) (y, BLKmode))
1667 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1668 || (*pred) (opalign, VOIDmode)))
1670 rtx op2;
1671 rtx last = get_last_insn ();
1672 rtx pat;
1674 op2 = convert_to_mode (mode, size, 1);
1675 pred = insn_data[(int) code].operand[2].predicate;
1676 if (pred != 0 && ! (*pred) (op2, mode))
1677 op2 = copy_to_mode_reg (mode, op2);
1679 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1680 if (pat)
1682 emit_insn (pat);
1683 volatile_ok = 0;
1684 return 0;
1686 else
1687 delete_insns_since (last);
1691 volatile_ok = 0;
1693 /* X, Y, or SIZE may have been passed through protect_from_queue.
1695 It is unsafe to save the value generated by protect_from_queue
1696 and reuse it later. Consider what happens if emit_queue is
1697 called before the return value from protect_from_queue is used.
1699 Expansion of the CALL_EXPR below will call emit_queue before
1700 we are finished emitting RTL for argument setup. So if we are
1701 not careful we could get the wrong value for an argument.
1703 To avoid this problem we go ahead and emit code to copy X, Y &
1704 SIZE into new pseudos. We can then place those new pseudos
1705 into an RTL_EXPR and use them later, even after a call to
1706 emit_queue.
1708 Note this is not strictly needed for library calls since they
1709 do not call emit_queue before loading their arguments. However,
1710 we may need to have library calls call emit_queue in the future
1711 since failing to do so could cause problems for targets which
1712 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1713 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1714 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1716 #ifdef TARGET_MEM_FUNCTIONS
1717 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1718 #else
1719 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1720 TREE_UNSIGNED (integer_type_node));
1721 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1722 #endif
1724 #ifdef TARGET_MEM_FUNCTIONS
1725 /* It is incorrect to use the libcall calling conventions to call
1726 memcpy in this context.
1728 This could be a user call to memcpy and the user may wish to
1729 examine the return value from memcpy.
1731 For targets where libcalls and normal calls have different conventions
1732 for returning pointers, we could end up generating incorrect code.
1734 So instead of using a libcall sequence we build up a suitable
1735 CALL_EXPR and expand the call in the normal fashion. */
1736 if (fn == NULL_TREE)
1738 tree fntype;
1740 /* This was copied from except.c, I don't know if all this is
1741 necessary in this context or not. */
1742 fn = get_identifier ("memcpy");
1743 fntype = build_pointer_type (void_type_node);
1744 fntype = build_function_type (fntype, NULL_TREE);
1745 fn = build_decl (FUNCTION_DECL, fn, fntype);
1746 ggc_add_tree_root (&fn, 1);
1747 DECL_EXTERNAL (fn) = 1;
1748 TREE_PUBLIC (fn) = 1;
1749 DECL_ARTIFICIAL (fn) = 1;
1750 make_decl_rtl (fn, NULL_PTR);
1751 assemble_external (fn);
1754 /* We need to make an argument list for the function call.
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node), x));
1761 TREE_CHAIN (arg_list)
1762 = build_tree_list (NULL_TREE,
1763 make_tree (build_pointer_type (void_type_node), y));
1764 TREE_CHAIN (TREE_CHAIN (arg_list))
1765 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1766 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1768 /* Now we have to build up the CALL_EXPR itself. */
1769 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1770 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1771 call_expr, arg_list, NULL_TREE);
1772 TREE_SIDE_EFFECTS (call_expr) = 1;
1774 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1775 #else
1776 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1777 VOIDmode, 3, y, Pmode, x, Pmode,
1778 convert_to_mode (TYPE_MODE (integer_type_node), size,
1779 TREE_UNSIGNED (integer_type_node)),
1780 TYPE_MODE (integer_type_node));
1781 #endif
1784 return retval;
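/* Illustrative example (editorial addition, not original expr.c code):
   once both sides of an aggregate copy are BLKmode MEMs, expanders hand
   them to emit_block_move.  Assuming `dst_mem' and `src_mem' are such MEMs
   (placeholder names) for a 64-byte object known to be word aligned:

       emit_block_move (dst_mem, src_mem, GEN_INT (64), BITS_PER_WORD);

   Depending on the target this becomes a sequence of scalar moves, a
   movstr pattern, or a call to memcpy/bcopy, as implemented above.  */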
1787 /* Copy all or part of a value X into registers starting at REGNO.
1788 The number of registers to be filled is NREGS. */
1790 void
1791 move_block_to_reg (regno, x, nregs, mode)
1792 int regno;
1793 rtx x;
1794 int nregs;
1795 enum machine_mode mode;
1797 int i;
1798 #ifdef HAVE_load_multiple
1799 rtx pat;
1800 rtx last;
1801 #endif
1803 if (nregs == 0)
1804 return;
1806 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1807 x = validize_mem (force_const_mem (mode, x));
1809 /* See if the machine can do this with a load multiple insn. */
1810 #ifdef HAVE_load_multiple
1811 if (HAVE_load_multiple)
1813 last = get_last_insn ();
1814 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1815 GEN_INT (nregs));
1816 if (pat)
1818 emit_insn (pat);
1819 return;
1821 else
1822 delete_insns_since (last);
1824 #endif
1826 for (i = 0; i < nregs; i++)
1827 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1828 operand_subword_force (x, i, mode));
1831 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1832 The number of registers to be filled is NREGS. SIZE indicates the number
1833 of bytes in the object X. */
1835 void
1836 move_block_from_reg (regno, x, nregs, size)
1837 int regno;
1838 rtx x;
1839 int nregs;
1840 int size;
1842 int i;
1843 #ifdef HAVE_store_multiple
1844 rtx pat;
1845 rtx last;
1846 #endif
1847 enum machine_mode mode;
1849 if (nregs == 0)
1850 return;
1852 /* If SIZE is that of a mode no bigger than a word, just use that
1853 mode's store operation. */
1854 if (size <= UNITS_PER_WORD
1855 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1857 emit_move_insn (change_address (x, mode, NULL),
1858 gen_rtx_REG (mode, regno));
1859 return;
1862 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1863 to the left before storing to memory. Note that the previous test
1864 doesn't handle all cases (e.g. SIZE == 3). */
1865 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1867 rtx tem = operand_subword (x, 0, 1, BLKmode);
1868 rtx shift;
1870 if (tem == 0)
1871 abort ();
1873 shift = expand_shift (LSHIFT_EXPR, word_mode,
1874 gen_rtx_REG (word_mode, regno),
1875 build_int_2 ((UNITS_PER_WORD - size)
1876 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1877 emit_move_insn (tem, shift);
1878 return;
1881 /* See if the machine can do this with a store multiple insn. */
1882 #ifdef HAVE_store_multiple
1883 if (HAVE_store_multiple)
1885 last = get_last_insn ();
1886 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1887 GEN_INT (nregs));
1888 if (pat)
1890 emit_insn (pat);
1891 return;
1893 else
1894 delete_insns_since (last);
1896 #endif
1898 for (i = 0; i < nregs; i++)
1900 rtx tem = operand_subword (x, i, 1, BLKmode);
1902 if (tem == 0)
1903 abort ();
1905 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1909 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1910 registers represented by a PARALLEL. SSIZE represents the total size of
1911 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1912 SRC in bits. */
1913 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1914 the balance will be in what would be the low-order memory addresses, i.e.
1915 left justified for big endian, right justified for little endian. This
1916 happens to be true for the targets currently using this support. If this
1917 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1918 would be needed. */
1920 void
1921 emit_group_load (dst, orig_src, ssize, align)
1922 rtx dst, orig_src;
1923 unsigned int align;
1924 int ssize;
1926 rtx *tmps, src;
1927 int start, i;
1929 if (GET_CODE (dst) != PARALLEL)
1930 abort ();
1932 /* Check for a NULL entry, used to indicate that the parameter goes
1933 both on the stack and in registers. */
1934 if (XEXP (XVECEXP (dst, 0, 0), 0))
1935 start = 0;
1936 else
1937 start = 1;
1939 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1941 /* Process the pieces. */
1942 for (i = start; i < XVECLEN (dst, 0); i++)
1944 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1945 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1946 unsigned int bytelen = GET_MODE_SIZE (mode);
1947 int shift = 0;
1949 /* Handle trailing fragments that run over the size of the struct. */
1950 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1952 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1953 bytelen = ssize - bytepos;
1954 if (bytelen <= 0)
1955 abort ();
1958 /* If we won't be loading directly from memory, protect the real source
1959 from strange tricks we might play; but make sure that the source can
1960 be loaded directly into the destination. */
1961 src = orig_src;
1962 if (GET_CODE (orig_src) != MEM
1963 && (!CONSTANT_P (orig_src)
1964 || (GET_MODE (orig_src) != mode
1965 && GET_MODE (orig_src) != VOIDmode)))
1967 if (GET_MODE (orig_src) == VOIDmode)
1968 src = gen_reg_rtx (mode);
1969 else
1970 src = gen_reg_rtx (GET_MODE (orig_src));
1971 emit_move_insn (src, orig_src);
1974 /* Optimize the access just a bit. */
1975 if (GET_CODE (src) == MEM
1976 && align >= GET_MODE_ALIGNMENT (mode)
1977 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1978 && bytelen == GET_MODE_SIZE (mode))
1980 tmps[i] = gen_reg_rtx (mode);
1981 emit_move_insn (tmps[i],
1982 change_address (src, mode,
1983 plus_constant (XEXP (src, 0),
1984 bytepos)));
1986 else if (GET_CODE (src) == CONCAT)
1988 if (bytepos == 0
1989 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1990 tmps[i] = XEXP (src, 0);
1991 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1992 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1993 tmps[i] = XEXP (src, 1);
1994 else
1995 abort ();
1997 else if (CONSTANT_P (src)
1998 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1999 tmps[i] = src;
2000 else
2001 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2002 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2003 mode, mode, align, ssize);
2005 if (BYTES_BIG_ENDIAN && shift)
2006 tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2007 tmps[i], 0, OPTAB_WIDEN);
2010 emit_queue ();
2012 /* Copy the extracted pieces into the proper (probable) hard regs. */
2013 for (i = start; i < XVECLEN (dst, 0); i++)
2014 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2017 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2018 registers represented by a PARALLEL. SSIZE represents the total size of
2019 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2021 void
2022 emit_group_store (orig_dst, src, ssize, align)
2023 rtx orig_dst, src;
2024 int ssize;
2025 unsigned int align;
2027 rtx *tmps, dst;
2028 int start, i;
2030 if (GET_CODE (src) != PARALLEL)
2031 abort ();
2033 /* Check for a NULL entry, used to indicate that the parameter goes
2034 both on the stack and in registers. */
2035 if (XEXP (XVECEXP (src, 0, 0), 0))
2036 start = 0;
2037 else
2038 start = 1;
2040 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2042 /* Copy the (probable) hard regs into pseudos. */
2043 for (i = start; i < XVECLEN (src, 0); i++)
2045 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2046 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2047 emit_move_insn (tmps[i], reg);
2049 emit_queue ();
2051 /* If we won't be storing directly into memory, protect the real destination
2052 from strange tricks we might play. */
2053 dst = orig_dst;
2054 if (GET_CODE (dst) == PARALLEL)
2056 rtx temp;
2058 /* We can get a PARALLEL dst if there is a conditional expression in
2059 a return statement. In that case, the dst and src are the same,
2060 so no action is necessary. */
2061 if (rtx_equal_p (dst, src))
2062 return;
2064 /* It is unclear if we can ever reach here, but we may as well handle
2065 it. Allocate a temporary, and split this into a store/load to/from
2066 the temporary. */
2068 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2069 emit_group_store (temp, src, ssize, align);
2070 emit_group_load (dst, temp, ssize, align);
2071 return;
2073 else if (GET_CODE (dst) != MEM)
2075 dst = gen_reg_rtx (GET_MODE (orig_dst));
2076 /* Make life a bit easier for combine. */
2077 emit_move_insn (dst, const0_rtx);
2080 /* Process the pieces. */
2081 for (i = start; i < XVECLEN (src, 0); i++)
2083 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2084 enum machine_mode mode = GET_MODE (tmps[i]);
2085 unsigned int bytelen = GET_MODE_SIZE (mode);
2087 /* Handle trailing fragments that run over the size of the struct. */
2088 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2090 if (BYTES_BIG_ENDIAN)
2092 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2093 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2094 tmps[i], 0, OPTAB_WIDEN);
2096 bytelen = ssize - bytepos;
2099 /* Optimize the access just a bit. */
2100 if (GET_CODE (dst) == MEM
2101 && align >= GET_MODE_ALIGNMENT (mode)
2102 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2103 && bytelen == GET_MODE_SIZE (mode))
2104 emit_move_insn (change_address (dst, mode,
2105 plus_constant (XEXP (dst, 0),
2106 bytepos)),
2107 tmps[i]);
2108 else
2109 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2110 mode, tmps[i], align, ssize);
2113 emit_queue ();
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (GET_CODE (dst) == REG)
2117 emit_move_insn (orig_dst, dst);
2120 /* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2124 The primary purpose of this routine is to handle functions
2125 that return BLKmode structures in registers. Some machines
2126 (the PA for example) want to return all small structures
2127 in registers regardless of the structure's alignment. */
2130 rtx copy_blkmode_from_reg (tgtblk, srcreg, type)
2131 rtx tgtblk;
2132 rtx srcreg;
2133 tree type;
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2140 if (tgtblk == 0)
2142 tgtblk = assign_temp (build_qualified_type (type,
2143 (TYPE_QUALS (type)
2144 | TYPE_QUAL_CONST)),
2145 0, 1, 1);
2146 preserve_temp_slots (tgtblk);
2149 /* This code assumes srcreg is at least a full word. If it isn't,
2150 copy it into a new pseudo which is a full word. */
2151 if (GET_MODE (srcreg) != BLKmode
2152 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2153 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2155 /* Structures whose size is not a multiple of a word are aligned
2156 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2157 machine, this means we must skip the empty high order bytes when
2158 calculating the bit offset. */
2159 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2160 big_endian_correction
2161 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
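/* Worked example (illustrative): on a 32-bit big-endian target
   (UNITS_PER_WORD == 4), a 6-byte structure gives bytes % UNITS_PER_WORD
   == 2, so big_endian_correction == 32 - 2 * 8 == 16; the first 16 bits
   of the first source word are skipped when extracting below.  */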
2163 /* Copy the structure BITSIZE bits at a time.
2165 We could probably emit more efficient code for machines which do not use
2166 strict alignment, but it doesn't seem worth the effort at the current
2167 time. */
2168 for (bitpos = 0, xbitpos = big_endian_correction;
2169 bitpos < bytes * BITS_PER_UNIT;
2170 bitpos += bitsize, xbitpos += bitsize)
2172 /* We need a new source operand each time xbitpos is on a
2173 word boundary and when xbitpos == big_endian_correction
2174 (the first time through). */
2175 if (xbitpos % BITS_PER_WORD == 0
2176 || xbitpos == big_endian_correction)
2177 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2179 /* We need a new destination operand each time bitpos is on
2180 a word boundary. */
2181 if (bitpos % BITS_PER_WORD == 0)
2182 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2184 /* Use xbitpos for the source extraction (right justified) and
2185 bitpos for the destination store (left justified). */
2186 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2187 extract_bit_field (src, bitsize,
2188 xbitpos % BITS_PER_WORD, 1,
2189 NULL_RTX, word_mode, word_mode,
2190 bitsize, BITS_PER_WORD),
2191 bitsize, BITS_PER_WORD);
2194 return tgtblk;
2197 /* Add a USE expression for REG to the (possibly empty) list pointed
2198 to by CALL_FUSAGE. REG must denote a hard register. */
2200 void
2201 use_reg (call_fusage, reg)
2202 rtx *call_fusage, reg;
2204 if (GET_CODE (reg) != REG
2205 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2206 abort ();
2208 *call_fusage
2209 = gen_rtx_EXPR_LIST (VOIDmode,
2210 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2213 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2214 starting at REGNO. All of these registers must be hard registers. */
2216 void
2217 use_regs (call_fusage, regno, nregs)
2218 rtx *call_fusage;
2219 int regno;
2220 int nregs;
2222 int i;
2224 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2225 abort ();
2227 for (i = 0; i < nregs; i++)
2228 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2231 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2232 PARALLEL REGS. This is for calls that pass values in multiple
2233 non-contiguous locations. The Irix 6 ABI has examples of this. */
2235 void
2236 use_group_regs (call_fusage, regs)
2237 rtx *call_fusage;
2238 rtx regs;
2240 int i;
2242 for (i = 0; i < XVECLEN (regs, 0); i++)
2244 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2246 /* A NULL entry means the parameter goes both on the stack and in
2247 registers. This can also be a MEM for targets that pass values
2248 partially on the stack and partially in registers. */
2249 if (reg != 0 && GET_CODE (reg) == REG)
2250 use_reg (call_fusage, reg);
2256 int can_store_by_pieces (len, constfun, constfundata, align)
2257 unsigned HOST_WIDE_INT len;
2258 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2259 PTR constfundata;
2260 unsigned int align;
2262 unsigned HOST_WIDE_INT max_size, l;
2263 HOST_WIDE_INT offset = 0;
2264 enum machine_mode mode, tmode;
2265 enum insn_code icode;
2266 int reverse;
2267 rtx cst;
2269 if (! MOVE_BY_PIECES_P (len, align))
2270 return 0;
2272 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2273 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2274 align = MOVE_MAX * BITS_PER_UNIT;
2276 /* We would first store what we can in the largest integer mode, then go to
2277 successively smaller modes. */
2279 for (reverse = 0;
2280 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2281 reverse++)
2283 l = len;
2284 mode = VOIDmode;
2285 max_size = MOVE_MAX_PIECES + 1;
2286 while (max_size > 1)
2288 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2289 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2290 if (GET_MODE_SIZE (tmode) < max_size)
2291 mode = tmode;
2293 if (mode == VOIDmode)
2294 break;
2296 icode = mov_optab->handlers[(int) mode].insn_code;
2297 if (icode != CODE_FOR_nothing
2298 && align >= GET_MODE_ALIGNMENT (mode))
2300 unsigned int size = GET_MODE_SIZE (mode);
2302 while (l >= size)
2304 if (reverse)
2305 offset -= size;
2307 cst = (*constfun) (constfundata, offset, mode);
2308 if (!LEGITIMATE_CONSTANT_P (cst))
2309 return 0;
2311 if (!reverse)
2312 offset += size;
2314 l -= size;
2318 max_size = GET_MODE_SIZE (mode);
2321 /* The code above should have handled everything. */
2322 if (l != 0)
2323 abort ();
2326 return 1;
2329 /* Generate several move instructions to store LEN bytes generated by
2330 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2331 pointer which will be passed as argument in every CONSTFUN call.
2332 ALIGN is maximum alignment we can assume. */
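/* Illustrative sketch (hypothetical, not part of this file): a CONSTFUN
   callback that hands back successive MODE-sized pieces of a C string
   passed as CONSTFUNDATA might look like

	static rtx
	read_str_piece (data, offset, mode)
	     PTR data;
	     HOST_WIDE_INT offset;
	     enum machine_mode mode;
	{
	  const char *str = (const char *) data;
	  return c_readstr (str + offset, mode);
	}

   where c_readstr stands for some routine that builds a CONST_INT or
   CONST_DOUBLE from raw bytes; the helper and its name are assumptions of
   this sketch, not definitions made here.  */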
2334 void
2335 store_by_pieces (to, len, constfun, constfundata, align)
2336 rtx to;
2337 unsigned HOST_WIDE_INT len;
2338 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2339 PTR constfundata;
2340 unsigned int align;
2342 struct store_by_pieces data;
2344 if (! MOVE_BY_PIECES_P (len, align))
2345 abort ();
2346 to = protect_from_queue (to, 1);
2347 data.constfun = constfun;
2348 data.constfundata = constfundata;
2349 data.len = len;
2350 data.to = to;
2351 store_by_pieces_1 (&data, align);
2354 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2355 rtx with BLKmode). The caller must pass TO through protect_from_queue
2356 before calling. ALIGN is maximum alignment we can assume. */
2358 static void
2359 clear_by_pieces (to, len, align)
2360 rtx to;
2361 unsigned HOST_WIDE_INT len;
2362 unsigned int align;
2364 struct store_by_pieces data;
2366 data.constfun = clear_by_pieces_1;
2367 data.constfundata = NULL_PTR;
2368 data.len = len;
2369 data.to = to;
2370 store_by_pieces_1 (&data, align);
2373 /* Callback routine for clear_by_pieces.
2374 Return const0_rtx unconditionally. */
2376 static rtx
2377 clear_by_pieces_1 (data, offset, mode)
2378 PTR data ATTRIBUTE_UNUSED;
2379 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2380 enum machine_mode mode ATTRIBUTE_UNUSED;
2382 return const0_rtx;
2385 /* Subroutine of clear_by_pieces and store_by_pieces.
2386 Generate several move instructions to store LEN bytes of block TO. (A MEM
2387 rtx with BLKmode). The caller must pass TO through protect_from_queue
2388 before calling. ALIGN is maximum alignment we can assume. */
2390 static void
2391 store_by_pieces_1 (data, align)
2392 struct store_by_pieces *data;
2393 unsigned int align;
2395 rtx to_addr = XEXP (data->to, 0);
2396 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2397 enum machine_mode mode = VOIDmode, tmode;
2398 enum insn_code icode;
2400 data->offset = 0;
2401 data->to_addr = to_addr;
2402 data->autinc_to
2403 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2404 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2406 data->explicit_inc_to = 0;
2407 data->reverse
2408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2409 if (data->reverse)
2410 data->offset = data->len;
2412 /* If storing requires more than two move insns,
2413 copy addresses to registers (to make displacements shorter)
2414 and use post-increment if available. */
2415 if (!data->autinc_to
2416 && move_by_pieces_ninsns (data->len, align) > 2)
2418 /* Determine the main mode we'll be using. */
2419 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2420 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2421 if (GET_MODE_SIZE (tmode) < max_size)
2422 mode = tmode;
2424 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2426 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2427 data->autinc_to = 1;
2428 data->explicit_inc_to = -1;
2431 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2432 && ! data->autinc_to)
2434 data->to_addr = copy_addr_to_reg (to_addr);
2435 data->autinc_to = 1;
2436 data->explicit_inc_to = 1;
2439 if ( !data->autinc_to && CONSTANT_P (to_addr))
2440 data->to_addr = copy_addr_to_reg (to_addr);
2443 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2444 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2445 align = MOVE_MAX * BITS_PER_UNIT;
2447 /* First store what we can in the largest integer mode, then go to
2448 successively smaller modes. */
2450 while (max_size > 1)
2452 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2453 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2454 if (GET_MODE_SIZE (tmode) < max_size)
2455 mode = tmode;
2457 if (mode == VOIDmode)
2458 break;
2460 icode = mov_optab->handlers[(int) mode].insn_code;
2461 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2462 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2464 max_size = GET_MODE_SIZE (mode);
2467 /* The code above should have handled everything. */
2468 if (data->len != 0)
2469 abort ();
2472 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2473 with move instructions for mode MODE. GENFUN is the gen_... function
2474 to make a move insn for that mode. DATA has all the other info. */
2476 static void
2477 store_by_pieces_2 (genfun, mode, data)
2478 rtx (*genfun) PARAMS ((rtx, ...));
2479 enum machine_mode mode;
2480 struct store_by_pieces *data;
2482 unsigned int size = GET_MODE_SIZE (mode);
2483 rtx to1, cst;
2485 while (data->len >= size)
2487 if (data->reverse)
2488 data->offset -= size;
2490 if (data->autinc_to)
2492 to1 = gen_rtx_MEM (mode, data->to_addr);
2493 MEM_COPY_ATTRIBUTES (to1, data->to);
2495 else
2496 to1 = change_address (data->to, mode,
2497 plus_constant (data->to_addr, data->offset));
2499 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2500 emit_insn (gen_add2_insn (data->to_addr,
2501 GEN_INT (-(HOST_WIDE_INT) size)));
2503 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2504 emit_insn ((*genfun) (to1, cst));
2506 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2507 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2509 if (! data->reverse)
2510 data->offset += size;
2512 data->len -= size;
2516 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2517 its length in bytes and ALIGN is the maximum alignment we can assume.
2519 If we end up calling a function (memset) to do the work, return the value it returned; otherwise return 0. */
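/* Illustrative note (not from the original sources): for a BLKmode MEM
   OBJECT, a call such as clear_storage (object, GEN_INT (24), 32) first
   tries clear_by_pieces when MOVE_BY_PIECES_P (24, 32) holds, then any
   clrstr pattern the target provides, and finally falls back to the
   memset (or bzero) call built below.  The particular numbers are only
   an example.  */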
2522 rtx clear_storage (object, size, align)
2523 rtx object;
2524 rtx size;
2525 unsigned int align;
2527 #ifdef TARGET_MEM_FUNCTIONS
2528 static tree fn;
2529 tree call_expr, arg_list;
2530 #endif
2531 rtx retval = 0;
2533 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2534 just move a zero. Otherwise, do this a piece at a time. */
2535 if (GET_MODE (object) != BLKmode
2536 && GET_CODE (size) == CONST_INT
2537 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2538 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2539 else
2541 object = protect_from_queue (object, 1);
2542 size = protect_from_queue (size, 0);
2544 if (GET_CODE (size) == CONST_INT
2545 && MOVE_BY_PIECES_P (INTVAL (size), align))
2546 clear_by_pieces (object, INTVAL (size), align);
2547 else
2549 /* Try the most limited insn first, because there's no point
2550 including more than one in the machine description unless
2551 the more limited one has some advantage. */
2553 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2554 enum machine_mode mode;
2556 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2557 mode = GET_MODE_WIDER_MODE (mode))
2559 enum insn_code code = clrstr_optab[(int) mode];
2560 insn_operand_predicate_fn pred;
2562 if (code != CODE_FOR_nothing
2563 /* We don't need MODE to be narrower than
2564 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2565 the mode mask, as it is returned by the macro, it will
2566 definitely be less than the actual mode mask. */
2567 && ((GET_CODE (size) == CONST_INT
2568 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2569 <= (GET_MODE_MASK (mode) >> 1)))
2570 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2571 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2572 || (*pred) (object, BLKmode))
2573 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2574 || (*pred) (opalign, VOIDmode)))
2576 rtx op1;
2577 rtx last = get_last_insn ();
2578 rtx pat;
2580 op1 = convert_to_mode (mode, size, 1);
2581 pred = insn_data[(int) code].operand[1].predicate;
2582 if (pred != 0 && ! (*pred) (op1, mode))
2583 op1 = copy_to_mode_reg (mode, op1);
2585 pat = GEN_FCN ((int) code) (object, op1, opalign);
2586 if (pat)
2588 emit_insn (pat);
2589 return 0;
2591 else
2592 delete_insns_since (last);
2596 /* OBJECT or SIZE may have been passed through protect_from_queue.
2598 It is unsafe to save the value generated by protect_from_queue
2599 and reuse it later. Consider what happens if emit_queue is
2600 called before the return value from protect_from_queue is used.
2602 Expansion of the CALL_EXPR below will call emit_queue before
2603 we are finished emitting RTL for argument setup. So if we are
2604 not careful we could get the wrong value for an argument.
2606 To avoid this problem we go ahead and emit code to copy OBJECT
2607 and SIZE into new pseudos. We can then place those new pseudos
2608 into an RTL_EXPR and use them later, even after a call to
2609 emit_queue.
2611 Note this is not strictly needed for library calls since they
2612 do not call emit_queue before loading their arguments. However,
2613 we may need to have library calls call emit_queue in the future
2614 since failing to do so could cause problems for targets which
2615 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2616 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2618 #ifdef TARGET_MEM_FUNCTIONS
2619 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2620 #else
2621 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2622 TREE_UNSIGNED (integer_type_node));
2623 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2624 #endif
2626 #ifdef TARGET_MEM_FUNCTIONS
2627 /* It is incorrect to use the libcall calling conventions to call
2628 memset in this context.
2630 This could be a user call to memset and the user may wish to
2631 examine the return value from memset.
2633 For targets where libcalls and normal calls have different
2634 conventions for returning pointers, we could end up generating
2635 incorrect code.
2637 So instead of using a libcall sequence we build up a suitable
2638 CALL_EXPR and expand the call in the normal fashion. */
2639 if (fn == NULL_TREE)
2641 tree fntype;
2643 /* This was copied from except.c; I don't know whether all of it is
2644 necessary in this context or not. */
2645 fn = get_identifier ("memset");
2646 fntype = build_pointer_type (void_type_node);
2647 fntype = build_function_type (fntype, NULL_TREE);
2648 fn = build_decl (FUNCTION_DECL, fn, fntype);
2649 ggc_add_tree_root (&fn, 1);
2650 DECL_EXTERNAL (fn) = 1;
2651 TREE_PUBLIC (fn) = 1;
2652 DECL_ARTIFICIAL (fn) = 1;
2653 make_decl_rtl (fn, NULL_PTR);
2654 assemble_external (fn);
2657 /* We need to make an argument list for the function call.
2659 memset has three arguments: the first is a void * address, the
2660 second an integer with the initialization value, and the last is a
2661 size_t count of bytes to set. */
2662 arg_list
2663 = build_tree_list (NULL_TREE,
2664 make_tree (build_pointer_type (void_type_node),
2665 object));
2666 TREE_CHAIN (arg_list)
2667 = build_tree_list (NULL_TREE,
2668 make_tree (integer_type_node, const0_rtx));
2669 TREE_CHAIN (TREE_CHAIN (arg_list))
2670 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2671 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2673 /* Now we have to build up the CALL_EXPR itself. */
2674 call_expr = build1 (ADDR_EXPR,
2675 build_pointer_type (TREE_TYPE (fn)), fn);
2676 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2677 call_expr, arg_list, NULL_TREE);
2678 TREE_SIDE_EFFECTS (call_expr) = 1;
2680 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2681 #else
2682 emit_library_call (bzero_libfunc, LCT_NORMAL,
2683 VOIDmode, 2, object, Pmode, size,
2684 TYPE_MODE (integer_type_node));
2685 #endif
2689 return retval;
2692 /* Generate code to copy Y into X.
2693 Both Y and X must have the same mode, except that
2694 Y can be a constant with VOIDmode.
2695 This mode cannot be BLKmode; use emit_block_move for that.
2697 Return the last instruction emitted. */
2700 rtx emit_move_insn (x, y)
2701 rtx x, y;
2703 enum machine_mode mode = GET_MODE (x);
2704 rtx y_cst = NULL_RTX;
2705 rtx last_insn;
2707 x = protect_from_queue (x, 1);
2708 y = protect_from_queue (y, 0);
2710 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2711 abort ();
2713 /* Never force constant_p_rtx to memory. */
2714 if (GET_CODE (y) == CONSTANT_P_RTX)
2716 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2718 y_cst = y;
2719 y = force_const_mem (mode, y);
2722 /* If X or Y are memory references, verify that their addresses are valid
2723 for the machine. */
2724 if (GET_CODE (x) == MEM
2725 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2726 && ! push_operand (x, GET_MODE (x)))
2727 || (flag_force_addr
2728 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2729 x = change_address (x, VOIDmode, XEXP (x, 0));
2731 if (GET_CODE (y) == MEM
2732 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2733 || (flag_force_addr
2734 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2735 y = change_address (y, VOIDmode, XEXP (y, 0));
2737 if (mode == BLKmode)
2738 abort ();
2740 last_insn = emit_move_insn_1 (x, y);
2742 if (y_cst && GET_CODE (x) == REG)
2743 REG_NOTES (last_insn)
2744 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2746 return last_insn;
2749 /* Low level part of emit_move_insn.
2750 Called just like emit_move_insn, but assumes X and Y
2751 are basically valid. */
2754 rtx emit_move_insn_1 (x, y)
2755 rtx x, y;
2757 enum machine_mode mode = GET_MODE (x);
2758 enum machine_mode submode;
2759 enum mode_class class = GET_MODE_CLASS (mode);
2760 unsigned int i;
2762 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2763 abort ();
2765 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2766 return
2767 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2769 /* Expand complex moves by moving real part and imag part, if possible. */
2770 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2771 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2772 * BITS_PER_UNIT),
2773 (class == MODE_COMPLEX_INT
2774 ? MODE_INT : MODE_FLOAT),
2776 && (mov_optab->handlers[(int) submode].insn_code
2777 != CODE_FOR_nothing))
2779 /* Don't split destination if it is a stack push. */
2780 int stack = push_operand (x, GET_MODE (x));
2782 /* If this is a stack, push the highpart first, so it
2783 will be in the argument order.
2785 In that case, change_address is used only to convert
2786 the mode, not to change the address. */
2787 if (stack)
2789 /* Note that the real part always precedes the imag part in memory
2790 regardless of the machine's endianness. */
2791 #ifdef STACK_GROWS_DOWNWARD
2792 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2793 (gen_rtx_MEM (submode, XEXP (x, 0)),
2794 gen_imagpart (submode, y)));
2795 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2796 (gen_rtx_MEM (submode, XEXP (x, 0)),
2797 gen_realpart (submode, y)));
2798 #else
2799 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2800 (gen_rtx_MEM (submode, XEXP (x, 0)),
2801 gen_realpart (submode, y)));
2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2803 (gen_rtx_MEM (submode, XEXP (x, 0)),
2804 gen_imagpart (submode, y)));
2805 #endif
2807 else
2809 rtx realpart_x, realpart_y;
2810 rtx imagpart_x, imagpart_y;
2812 /* If this is a complex value with each part being smaller than a
2813 word, the usual calling sequence will likely pack the pieces into
2814 a single register. Unfortunately, SUBREG of hard registers only
2815 deals in terms of words, so we have a problem converting input
2816 arguments to the CONCAT of two registers that is used elsewhere
2817 for complex values. If this is before reload, we can copy it into
2818 memory and reload. FIXME, we should see about using extract and
2819 insert on integer registers, but complex short and complex char
2820 variables should be rarely used. */
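/* Worked example (illustrative): a complex char value (CQImode, 16 bits)
   packed into a single hard register is moved through an HImode stack
   temporary here: MEM holds the HImode view, CMEM the CQImode view of
   the same slot, and the hard register is accessed through an HImode
   SUBREG.  The specific modes are only an example.  */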
2821 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2822 && (reload_in_progress | reload_completed) == 0)
2824 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2825 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2827 if (packed_dest_p || packed_src_p)
2829 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2830 ? MODE_FLOAT : MODE_INT);
2832 enum machine_mode reg_mode
2833 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2835 if (reg_mode != BLKmode)
2837 rtx mem = assign_stack_temp (reg_mode,
2838 GET_MODE_SIZE (mode), 0);
2839 rtx cmem = change_address (mem, mode, NULL_RTX);
2841 cfun->cannot_inline
2842 = N_("function using short complex types cannot be inline");
2844 if (packed_dest_p)
2846 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2847 emit_move_insn_1 (cmem, y);
2848 return emit_move_insn_1 (sreg, mem);
2850 else
2852 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2853 emit_move_insn_1 (mem, sreg);
2854 return emit_move_insn_1 (x, cmem);
2860 realpart_x = gen_realpart (submode, x);
2861 realpart_y = gen_realpart (submode, y);
2862 imagpart_x = gen_imagpart (submode, x);
2863 imagpart_y = gen_imagpart (submode, y);
2865 /* Show the output dies here. This is necessary for SUBREGs
2866 of pseudos since we cannot track their lifetimes correctly;
2867 hard regs shouldn't appear here except as return values.
2868 We never want to emit such a clobber after reload. */
2869 if (x != y
2870 && ! (reload_in_progress || reload_completed)
2871 && (GET_CODE (realpart_x) == SUBREG
2872 || GET_CODE (imagpart_x) == SUBREG))
2874 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2878 (realpart_x, realpart_y));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (imagpart_x, imagpart_y));
2883 return get_last_insn ();
2886 /* This will handle any multi-word mode that lacks a move_insn pattern.
2887 However, you will get better code if you define such patterns,
2888 even if they must turn into multiple assembler instructions. */
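/* Illustrative note (not from the original sources): on a 32-bit target
   with no movdi pattern, a DImode move falls into the multi-word arm
   below and is emitted as one word-sized move per word built with
   operand_subword; a CLOBBER of the destination is emitted first when
   any destination word turns out to be a SUBREG of a pseudo.  */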
2889 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2891 rtx last_insn = 0;
2892 rtx seq, inner;
2893 int need_clobber;
2895 #ifdef PUSH_ROUNDING
2897 /* If X is a push on the stack, do the push now and replace
2898 X with a reference to the stack pointer. */
2899 if (push_operand (x, GET_MODE (x)))
2901 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2902 x = change_address (x, VOIDmode, stack_pointer_rtx);
2904 #endif
2906 /* If we are in reload, see if either operand is a MEM whose address
2907 is scheduled for replacement. */
2908 if (reload_in_progress && GET_CODE (x) == MEM
2909 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2911 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2913 MEM_COPY_ATTRIBUTES (new, x);
2914 x = new;
2916 if (reload_in_progress && GET_CODE (y) == MEM
2917 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2919 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2921 MEM_COPY_ATTRIBUTES (new, y);
2922 y = new;
2925 start_sequence ();
2927 need_clobber = 0;
2928 for (i = 0;
2929 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2930 i++)
2932 rtx xpart = operand_subword (x, i, 1, mode);
2933 rtx ypart = operand_subword (y, i, 1, mode);
2935 /* If we can't get a part of Y, put Y into memory if it is a
2936 constant. Otherwise, force it into a register. If we still
2937 can't get a part of Y, abort. */
2938 if (ypart == 0 && CONSTANT_P (y))
2940 y = force_const_mem (mode, y);
2941 ypart = operand_subword (y, i, 1, mode);
2943 else if (ypart == 0)
2944 ypart = operand_subword_force (y, i, mode);
2946 if (xpart == 0 || ypart == 0)
2947 abort ();
2949 need_clobber |= (GET_CODE (xpart) == SUBREG);
2951 last_insn = emit_move_insn (xpart, ypart);
2954 seq = gen_sequence ();
2955 end_sequence ();
2957 /* Show the output dies here. This is necessary for SUBREGs
2958 of pseudos since we cannot track their lifetimes correctly;
2959 hard regs shouldn't appear here except as return values.
2960 We never want to emit such a clobber after reload. */
2961 if (x != y
2962 && ! (reload_in_progress || reload_completed)
2963 && need_clobber != 0)
2965 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2968 emit_insn (seq);
2970 return last_insn;
2972 else
2973 abort ();
2976 /* Pushing data onto the stack. */
2978 /* Push a block of length SIZE (perhaps variable)
2979 and return an rtx to address the beginning of the block.
2980 Note that it is not possible for the value returned to be a QUEUED.
2981 The value may be virtual_outgoing_args_rtx.
2983 EXTRA is the number of bytes of padding to push in addition to SIZE.
2984 BELOW nonzero means this padding comes at low addresses;
2985 otherwise, the padding comes at high addresses. */
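/* Illustrative note (not from the original sources): on a target where
   STACK_GROWS_DOWNWARD is defined, push_block (GEN_INT (32), 8, 0)
   adjusts the stack by 32 + 8 == 40 bytes and returns an address based
   on virtual_outgoing_args_rtx, the lowest address of the block just
   made.  The sizes are only an example.  */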
2988 rtx push_block (size, extra, below)
2989 rtx size;
2990 int extra, below;
2992 register rtx temp;
2994 size = convert_modes (Pmode, ptr_mode, size, 1);
2995 if (CONSTANT_P (size))
2996 anti_adjust_stack (plus_constant (size, extra));
2997 else if (GET_CODE (size) == REG && extra == 0)
2998 anti_adjust_stack (size);
2999 else
3001 temp = copy_to_mode_reg (Pmode, size);
3002 if (extra != 0)
3003 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3004 temp, 0, OPTAB_LIB_WIDEN);
3005 anti_adjust_stack (temp);
3008 #ifndef STACK_GROWS_DOWNWARD
3009 #ifdef ARGS_GROW_DOWNWARD
3010 if (!ACCUMULATE_OUTGOING_ARGS)
3011 #else
3012 if (0)
3013 #endif
3014 #else
3015 if (1)
3016 #endif
3018 /* Return the lowest stack address when STACK or ARGS grow downward and
3019 we are not accumulating outgoing arguments (the c4x port uses such
3020 conventions). */
3021 temp = virtual_outgoing_args_rtx;
3022 if (extra != 0 && below)
3023 temp = plus_constant (temp, extra);
3025 else
3027 if (GET_CODE (size) == CONST_INT)
3028 temp = plus_constant (virtual_outgoing_args_rtx,
3029 -INTVAL (size) - (below ? 0 : extra));
3030 else if (extra != 0 && !below)
3031 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3032 negate_rtx (Pmode, plus_constant (size, extra)));
3033 else
3034 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3035 negate_rtx (Pmode, size));
3038 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3042 rtx gen_push_operand ()
3044 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3047 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3048 block of SIZE bytes. */
3050 static rtx
3051 get_push_address (size)
3052 int size;
3054 register rtx temp;
3056 if (STACK_PUSH_CODE == POST_DEC)
3057 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3058 else if (STACK_PUSH_CODE == POST_INC)
3059 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3060 else
3061 temp = stack_pointer_rtx;
3063 return copy_to_reg (temp);
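/* Illustrative note (not from the original sources): with a pre-modify
   push code (PRE_DEC, the usual downward-growing case, or PRE_INC), the
   block just pushed starts at the updated stack pointer itself; with
   POST_DEC the data sits above the new pointer, so SIZE is added back,
   and with POST_INC it sits below, so SIZE is subtracted, which is what
   get_push_address computes above.  */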
3066 /* Generate code to push X onto the stack, assuming it has mode MODE and
3067 type TYPE.
3068 MODE is redundant except when X is a CONST_INT (since they don't
3069 carry mode info).
3070 SIZE is an rtx for the size of data to be copied (in bytes),
3071 needed only if X is BLKmode.
3073 ALIGN (in bits) is maximum alignment we can assume.
3075 If PARTIAL and REG are both nonzero, then copy that many of the first
3076 words of X into registers starting with REG, and push the rest of X.
3077 The amount of space pushed is decreased by PARTIAL words,
3078 rounded *down* to a multiple of PARM_BOUNDARY.
3079 REG must be a hard register in this case.
3080 If REG is zero but PARTIAL is not, take all other actions for an
3081 argument partially in registers, but do not actually load any
3082 registers.
3084 EXTRA is the amount in bytes of extra space to leave next to this arg.
3085 This is ignored if an argument block has already been allocated.
3087 On a machine that lacks real push insns, ARGS_ADDR is the address of
3088 the bottom of the argument block for this call. We use indexing off there
3089 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3090 argument block has not been preallocated.
3092 ARGS_SO_FAR is the size of args previously pushed for this call.
3094 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3095 for arguments passed in registers. If nonzero, it will be the number
3096 of bytes required. */
3098 void
3099 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3100 args_addr, args_so_far, reg_parm_stack_space,
3101 alignment_pad)
3102 register rtx x;
3103 enum machine_mode mode;
3104 tree type;
3105 rtx size;
3106 unsigned int align;
3107 int partial;
3108 rtx reg;
3109 int extra;
3110 rtx args_addr;
3111 rtx args_so_far;
3112 int reg_parm_stack_space;
3113 rtx alignment_pad;
3115 rtx xinner;
3116 enum direction stack_direction
3117 #ifdef STACK_GROWS_DOWNWARD
3118 = downward;
3119 #else
3120 = upward;
3121 #endif
3123 /* Decide where to pad the argument: `downward' for below,
3124 `upward' for above, or `none' for don't pad it.
3125 Default is below for small data on big-endian machines; else above. */
3126 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3128 /* Invert direction if stack is post-update. */
3129 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3130 if (where_pad != none)
3131 where_pad = (where_pad == downward ? upward : downward);
3133 xinner = x = protect_from_queue (x, 0);
3135 if (mode == BLKmode)
3137 /* Copy a block into the stack, entirely or partially. */
3139 register rtx temp;
3140 int used = partial * UNITS_PER_WORD;
3141 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3142 int skip;
3144 if (size == 0)
3145 abort ();
3147 used -= offset;
3149 /* USED is now the # of bytes we need not copy to the stack
3150 because registers will take care of them. */
3152 if (partial != 0)
3153 xinner = change_address (xinner, BLKmode,
3154 plus_constant (XEXP (xinner, 0), used));
3156 /* If the partial register-part of the arg counts in its stack size,
3157 skip the part of stack space corresponding to the registers.
3158 Otherwise, start copying to the beginning of the stack space,
3159 by setting SKIP to 0. */
3160 skip = (reg_parm_stack_space == 0) ? 0 : used;
3162 #ifdef PUSH_ROUNDING
3163 /* Do it with several push insns if that doesn't take lots of insns
3164 and if there is no difficulty with push insns that skip bytes
3165 on the stack for alignment purposes. */
3166 if (args_addr == 0
3167 && PUSH_ARGS
3168 && GET_CODE (size) == CONST_INT
3169 && skip == 0
3170 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3171 /* Here we avoid the case of a structure whose weak alignment
3172 forces many pushes of a small amount of data,
3173 and such small pushes do rounding that causes trouble. */
3174 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3175 || align >= BIGGEST_ALIGNMENT
3176 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3177 == (align / BITS_PER_UNIT)))
3178 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3180 /* Push padding now if padding above and stack grows down,
3181 or if padding below and stack grows up.
3182 But if space already allocated, this has already been done. */
3183 if (extra && args_addr == 0
3184 && where_pad != none && where_pad != stack_direction)
3185 anti_adjust_stack (GEN_INT (extra));
3187 stack_pointer_delta += INTVAL (size) - used;
3188 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3189 INTVAL (size) - used, align);
3191 if (current_function_check_memory_usage && ! in_check_memory_usage)
3193 rtx temp;
3195 in_check_memory_usage = 1;
3196 temp = get_push_address (INTVAL (size) - used);
3197 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3198 emit_library_call (chkr_copy_bitmap_libfunc,
3199 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3200 Pmode, XEXP (xinner, 0), Pmode,
3201 GEN_INT (INTVAL (size) - used),
3202 TYPE_MODE (sizetype));
3203 else
3204 emit_library_call (chkr_set_right_libfunc,
3205 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3206 Pmode, GEN_INT (INTVAL (size) - used),
3207 TYPE_MODE (sizetype),
3208 GEN_INT (MEMORY_USE_RW),
3209 TYPE_MODE (integer_type_node));
3210 in_check_memory_usage = 0;
3213 else
3214 #endif /* PUSH_ROUNDING */
3216 rtx target;
3218 /* Otherwise make space on the stack and copy the data
3219 to the address of that space. */
3221 /* Deduct words put into registers from the size we must copy. */
3222 if (partial != 0)
3224 if (GET_CODE (size) == CONST_INT)
3225 size = GEN_INT (INTVAL (size) - used);
3226 else
3227 size = expand_binop (GET_MODE (size), sub_optab, size,
3228 GEN_INT (used), NULL_RTX, 0,
3229 OPTAB_LIB_WIDEN);
3232 /* Get the address of the stack space.
3233 In this case, we do not deal with EXTRA separately.
3234 A single stack adjust will do. */
3235 if (! args_addr)
3237 temp = push_block (size, extra, where_pad == downward);
3238 extra = 0;
3240 else if (GET_CODE (args_so_far) == CONST_INT)
3241 temp = memory_address (BLKmode,
3242 plus_constant (args_addr,
3243 skip + INTVAL (args_so_far)));
3244 else
3245 temp = memory_address (BLKmode,
3246 plus_constant (gen_rtx_PLUS (Pmode,
3247 args_addr,
3248 args_so_far),
3249 skip));
3250 if (current_function_check_memory_usage && ! in_check_memory_usage)
3252 in_check_memory_usage = 1;
3253 target = copy_to_reg (temp);
3254 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3255 emit_library_call (chkr_copy_bitmap_libfunc,
3256 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3257 target, Pmode,
3258 XEXP (xinner, 0), Pmode,
3259 size, TYPE_MODE (sizetype));
3260 else
3261 emit_library_call (chkr_set_right_libfunc,
3262 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3263 target, Pmode,
3264 size, TYPE_MODE (sizetype),
3265 GEN_INT (MEMORY_USE_RW),
3266 TYPE_MODE (integer_type_node));
3267 in_check_memory_usage = 0;
3270 target = gen_rtx_MEM (BLKmode, temp);
3272 if (type != 0)
3274 set_mem_attributes (target, type, 1);
3275 /* Function incoming arguments may overlap with sibling call
3276 outgoing arguments and we cannot allow reordering of reads
3277 from function arguments with stores to outgoing arguments
3278 of sibling calls. */
3279 MEM_ALIAS_SET (target) = 0;
3282 /* TEMP is the address of the block. Copy the data there. */
3283 if (GET_CODE (size) == CONST_INT
3284 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3286 move_by_pieces (target, xinner, INTVAL (size), align);
3287 goto ret;
3289 else
3291 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3292 enum machine_mode mode;
3294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3295 mode != VOIDmode;
3296 mode = GET_MODE_WIDER_MODE (mode))
3298 enum insn_code code = movstr_optab[(int) mode];
3299 insn_operand_predicate_fn pred;
3301 if (code != CODE_FOR_nothing
3302 && ((GET_CODE (size) == CONST_INT
3303 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3304 <= (GET_MODE_MASK (mode) >> 1)))
3305 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3306 && (!(pred = insn_data[(int) code].operand[0].predicate)
3307 || ((*pred) (target, BLKmode)))
3308 && (!(pred = insn_data[(int) code].operand[1].predicate)
3309 || ((*pred) (xinner, BLKmode)))
3310 && (!(pred = insn_data[(int) code].operand[3].predicate)
3311 || ((*pred) (opalign, VOIDmode))))
3313 rtx op2 = convert_to_mode (mode, size, 1);
3314 rtx last = get_last_insn ();
3315 rtx pat;
3317 pred = insn_data[(int) code].operand[2].predicate;
3318 if (pred != 0 && ! (*pred) (op2, mode))
3319 op2 = copy_to_mode_reg (mode, op2);
3321 pat = GEN_FCN ((int) code) (target, xinner,
3322 op2, opalign);
3323 if (pat)
3325 emit_insn (pat);
3326 goto ret;
3328 else
3329 delete_insns_since (last);
3334 if (!ACCUMULATE_OUTGOING_ARGS)
3336 /* If the source is referenced relative to the stack pointer,
3337 copy it to another register to stabilize it. We do not need
3338 to do this if we know that we won't be changing sp. */
3340 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3341 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3342 temp = copy_to_reg (temp);
3345 /* Make inhibit_defer_pop nonzero around the library call
3346 to force it to pop the bcopy-arguments right away. */
3347 NO_DEFER_POP;
3348 #ifdef TARGET_MEM_FUNCTIONS
3349 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3350 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3351 convert_to_mode (TYPE_MODE (sizetype),
3352 size, TREE_UNSIGNED (sizetype)),
3353 TYPE_MODE (sizetype));
3354 #else
3355 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3356 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3357 convert_to_mode (TYPE_MODE (integer_type_node),
3358 size,
3359 TREE_UNSIGNED (integer_type_node)),
3360 TYPE_MODE (integer_type_node));
3361 #endif
3362 OK_DEFER_POP;
3365 else if (partial > 0)
3367 /* Scalar partly in registers. */
3369 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3370 int i;
3371 int not_stack;
3372 /* # words of start of argument
3373 that we must make space for but need not store. */
3374 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3375 int args_offset = INTVAL (args_so_far);
3376 int skip;
3378 /* Push padding now if padding above and stack grows down,
3379 or if padding below and stack grows up.
3380 But if space already allocated, this has already been done. */
3381 if (extra && args_addr == 0
3382 && where_pad != none && where_pad != stack_direction)
3383 anti_adjust_stack (GEN_INT (extra));
3385 /* If we make space by pushing it, we might as well push
3386 the real data. Otherwise, we can leave OFFSET nonzero
3387 and leave the space uninitialized. */
3388 if (args_addr == 0)
3389 offset = 0;
3391 /* Now NOT_STACK gets the number of words that we don't need to
3392 allocate on the stack. */
3393 not_stack = partial - offset;
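/* Worked example (illustrative): with PARM_BOUNDARY == 64 and
   BITS_PER_WORD == 32, PARTIAL == 3 and a preallocated argument block
   (ARGS_ADDR nonzero) give OFFSET == 3 % 2 == 1 and NOT_STACK == 2:
   two words need no stack slots at all, one register word keeps a slot
   that is left uninitialized, and the remaining words are pushed by the
   loop below.  */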
3395 /* If the partial register-part of the arg counts in its stack size,
3396 skip the part of stack space corresponding to the registers.
3397 Otherwise, start copying to the beginning of the stack space,
3398 by setting SKIP to 0. */
3399 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3401 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3402 x = validize_mem (force_const_mem (mode, x));
3404 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3405 SUBREGs of such registers are not allowed. */
3406 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3407 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3408 x = copy_to_reg (x);
3410 /* Loop over all the words allocated on the stack for this arg. */
3411 /* We can do it by words, because any scalar bigger than a word
3412 has a size a multiple of a word. */
3413 #ifndef PUSH_ARGS_REVERSED
3414 for (i = not_stack; i < size; i++)
3415 #else
3416 for (i = size - 1; i >= not_stack; i--)
3417 #endif
3418 if (i >= not_stack + offset)
3419 emit_push_insn (operand_subword_force (x, i, mode),
3420 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3421 0, args_addr,
3422 GEN_INT (args_offset + ((i - not_stack + skip)
3423 * UNITS_PER_WORD)),
3424 reg_parm_stack_space, alignment_pad);
3426 else
3428 rtx addr;
3429 rtx target = NULL_RTX;
3430 rtx dest;
3432 /* Push padding now if padding above and stack grows down,
3433 or if padding below and stack grows up.
3434 But if space already allocated, this has already been done. */
3435 if (extra && args_addr == 0
3436 && where_pad != none && where_pad != stack_direction)
3437 anti_adjust_stack (GEN_INT (extra));
3439 #ifdef PUSH_ROUNDING
3440 if (args_addr == 0 && PUSH_ARGS)
3442 addr = gen_push_operand ();
3443 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3445 else
3446 #endif
3448 if (GET_CODE (args_so_far) == CONST_INT)
3449 addr
3450 = memory_address (mode,
3451 plus_constant (args_addr,
3452 INTVAL (args_so_far)));
3453 else
3454 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3455 args_so_far));
3456 target = addr;
3459 dest = gen_rtx_MEM (mode, addr);
3460 if (type != 0)
3462 set_mem_attributes (dest, type, 1);
3463 /* Function incoming arguments may overlap with sibling call
3464 outgoing arguments and we cannot allow reordering of reads
3465 from function arguments with stores to outgoing arguments
3466 of sibling calls. */
3467 MEM_ALIAS_SET (dest) = 0;
3470 emit_move_insn (dest, x);
3472 if (current_function_check_memory_usage && ! in_check_memory_usage)
3474 in_check_memory_usage = 1;
3475 if (target == 0)
3476 target = get_push_address (GET_MODE_SIZE (mode));
3478 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3479 emit_library_call (chkr_copy_bitmap_libfunc,
3480 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3481 Pmode, XEXP (x, 0), Pmode,
3482 GEN_INT (GET_MODE_SIZE (mode)),
3483 TYPE_MODE (sizetype));
3484 else
3485 emit_library_call (chkr_set_right_libfunc,
3486 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3487 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3488 TYPE_MODE (sizetype),
3489 GEN_INT (MEMORY_USE_RW),
3490 TYPE_MODE (integer_type_node));
3491 in_check_memory_usage = 0;
3495 ret:
3496 /* If part should go in registers, copy that part
3497 into the appropriate registers. Do this now, at the end,
3498 since mem-to-mem copies above may do function calls. */
3499 if (partial > 0 && reg != 0)
3501 /* Handle calls that pass values in multiple non-contiguous locations.
3502 The Irix 6 ABI has examples of this. */
3503 if (GET_CODE (reg) == PARALLEL)
3504 emit_group_load (reg, x, -1, align); /* ??? size? */
3505 else
3506 move_block_to_reg (REGNO (reg), x, partial, mode);
3509 if (extra && args_addr == 0 && where_pad == stack_direction)
3510 anti_adjust_stack (GEN_INT (extra));
3512 if (alignment_pad && args_addr == 0)
3513 anti_adjust_stack (alignment_pad);
3516 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3517 operations. */
3519 static rtx
3520 get_subtarget (x)
3521 rtx x;
3523 return ((x == 0
3524 /* Only registers can be subtargets. */
3525 || GET_CODE (x) != REG
3526 /* If the register is readonly, it can't be set more than once. */
3527 || RTX_UNCHANGING_P (x)
3528 /* Don't use hard regs to avoid extending their life. */
3529 || REGNO (x) < FIRST_PSEUDO_REGISTER
3530 /* Avoid subtargets inside loops,
3531 since they hide some invariant expressions. */
3532 || preserve_subexpressions_p ())
3533 ? 0 : x);
3536 /* Expand an assignment that stores the value of FROM into TO.
3537 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3538 (This may contain a QUEUED rtx;
3539 if the value is constant, this rtx is a constant.)
3540 Otherwise, the returned value is NULL_RTX.
3542 SUGGEST_REG is no longer actually used.
3543 It used to mean, copy the value through a register
3544 and return that register, if that is possible.
3545 We now use WANT_VALUE to decide whether to do this. */
3548 rtx expand_assignment (to, from, want_value, suggest_reg)
3549 tree to, from;
3550 int want_value;
3551 int suggest_reg ATTRIBUTE_UNUSED;
3553 register rtx to_rtx = 0;
3554 rtx result;
3556 /* Don't crash if the lhs of the assignment was erroneous. */
3558 if (TREE_CODE (to) == ERROR_MARK)
3560 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3561 return want_value ? result : NULL_RTX;
3564 /* Assignment of a structure component needs special treatment
3565 if the structure component's rtx is not simply a MEM.
3566 Assignment of an array element at a constant index, and assignment of
3567 an array element in an unaligned packed structure field, have the same
3568 problem. */
3570 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3571 || TREE_CODE (to) == ARRAY_REF)
3573 enum machine_mode mode1;
3574 HOST_WIDE_INT bitsize, bitpos;
3575 tree offset;
3576 int unsignedp;
3577 int volatilep = 0;
3578 tree tem;
3579 unsigned int alignment;
3581 push_temp_slots ();
3582 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3583 &unsignedp, &volatilep, &alignment);
3585 /* If we are going to use store_bit_field and extract_bit_field,
3586 make sure to_rtx will be safe for multiple use. */
3588 if (mode1 == VOIDmode && want_value)
3589 tem = stabilize_reference (tem);
3591 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3592 if (offset != 0)
3594 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3596 if (GET_CODE (to_rtx) != MEM)
3597 abort ();
3599 if (GET_MODE (offset_rtx) != ptr_mode)
3601 #ifdef POINTERS_EXTEND_UNSIGNED
3602 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3603 #else
3604 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3605 #endif
3608 /* A constant address in TO_RTX can have VOIDmode, we must not try
3609 to call force_reg for that case. Avoid that case. */
3610 if (GET_CODE (to_rtx) == MEM
3611 && GET_MODE (to_rtx) == BLKmode
3612 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3613 && bitsize
3614 && (bitpos % bitsize) == 0
3615 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3616 && alignment == GET_MODE_ALIGNMENT (mode1))
3618 rtx temp = change_address (to_rtx, mode1,
3619 plus_constant (XEXP (to_rtx, 0),
3620 (bitpos /
3621 BITS_PER_UNIT)));
3622 if (GET_CODE (XEXP (temp, 0)) == REG)
3623 to_rtx = temp;
3624 else
3625 to_rtx = change_address (to_rtx, mode1,
3626 force_reg (GET_MODE (XEXP (temp, 0)),
3627 XEXP (temp, 0)));
3628 bitpos = 0;
3631 to_rtx = change_address (to_rtx, VOIDmode,
3632 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3633 force_reg (ptr_mode,
3634 offset_rtx)));
3637 if (volatilep)
3639 if (GET_CODE (to_rtx) == MEM)
3641 /* When the offset is zero, to_rtx is the address of the
3642 structure we are storing into, and hence may be shared.
3643 We must make a new MEM before setting the volatile bit. */
3644 if (offset == 0)
3645 to_rtx = copy_rtx (to_rtx);
3647 MEM_VOLATILE_P (to_rtx) = 1;
3649 #if 0 /* This was turned off because, when a field is volatile
3650 in an object which is not volatile, the object may be in a register,
3651 and then we would abort over here. */
3652 else
3653 abort ();
3654 #endif
3657 if (TREE_CODE (to) == COMPONENT_REF
3658 && TREE_READONLY (TREE_OPERAND (to, 1)))
3660 if (offset == 0)
3661 to_rtx = copy_rtx (to_rtx);
3663 RTX_UNCHANGING_P (to_rtx) = 1;
3666 /* Check the access. */
3667 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3669 rtx to_addr;
3670 int size;
3671 int best_mode_size;
3672 enum machine_mode best_mode;
3674 best_mode = get_best_mode (bitsize, bitpos,
3675 TYPE_ALIGN (TREE_TYPE (tem)),
3676 mode1, volatilep);
3677 if (best_mode == VOIDmode)
3678 best_mode = QImode;
3680 best_mode_size = GET_MODE_BITSIZE (best_mode);
3681 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3682 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3683 size *= GET_MODE_SIZE (best_mode);
3685 /* Check the access right of the pointer. */
3686 in_check_memory_usage = 1;
3687 if (size)
3688 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3689 VOIDmode, 3, to_addr, Pmode,
3690 GEN_INT (size), TYPE_MODE (sizetype),
3691 GEN_INT (MEMORY_USE_WO),
3692 TYPE_MODE (integer_type_node));
3693 in_check_memory_usage = 0;
3696 /* If this is a varying-length object, we must get the address of
3697 the source and do an explicit block move. */
3698 if (bitsize < 0)
3700 unsigned int from_align;
3701 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3702 rtx inner_to_rtx
3703 = change_address (to_rtx, VOIDmode,
3704 plus_constant (XEXP (to_rtx, 0),
3705 bitpos / BITS_PER_UNIT));
3707 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3708 MIN (alignment, from_align));
3709 free_temp_slots ();
3710 pop_temp_slots ();
3711 return to_rtx;
3713 else
3715 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3716 (want_value
3717 /* Spurious cast for HPUX compiler. */
3718 ? ((enum machine_mode)
3719 TYPE_MODE (TREE_TYPE (to)))
3720 : VOIDmode),
3721 unsignedp,
3722 alignment,
3723 int_size_in_bytes (TREE_TYPE (tem)),
3724 get_alias_set (to));
3726 preserve_temp_slots (result);
3727 free_temp_slots ();
3728 pop_temp_slots ();
3730 /* If the value is meaningful, convert RESULT to the proper mode.
3731 Otherwise, return nothing. */
3732 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3733 TYPE_MODE (TREE_TYPE (from)),
3734 result,
3735 TREE_UNSIGNED (TREE_TYPE (to)))
3736 : NULL_RTX);
3740 /* If the rhs is a function call and its value is not an aggregate,
3741 call the function before we start to compute the lhs.
3742 This is needed for correct code for cases such as
3743 val = setjmp (buf) on machines where reference to val
3744 requires loading up part of an address in a separate insn.
3746 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3747 since it might be a promoted variable where the zero- or sign- extension
3748 needs to be done. Handling this in the normal way is safe because no
3749 computation is done before the call. */
3750 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3751 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3752 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3753 && GET_CODE (DECL_RTL (to)) == REG))
3755 rtx value;
3757 push_temp_slots ();
3758 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3759 if (to_rtx == 0)
3760 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3762 /* Handle calls that return values in multiple non-contiguous locations.
3763 The Irix 6 ABI has examples of this. */
3764 if (GET_CODE (to_rtx) == PARALLEL)
3765 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3766 TYPE_ALIGN (TREE_TYPE (from)));
3767 else if (GET_MODE (to_rtx) == BLKmode)
3768 emit_block_move (to_rtx, value, expr_size (from),
3769 TYPE_ALIGN (TREE_TYPE (from)));
3770 else
3772 #ifdef POINTERS_EXTEND_UNSIGNED
3773 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3774 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3775 value = convert_memory_address (GET_MODE (to_rtx), value);
3776 #endif
3777 emit_move_insn (to_rtx, value);
3779 preserve_temp_slots (to_rtx);
3780 free_temp_slots ();
3781 pop_temp_slots ();
3782 return want_value ? to_rtx : NULL_RTX;
3785 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3786 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3788 if (to_rtx == 0)
3790 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3791 if (GET_CODE (to_rtx) == MEM)
3792 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3795 /* Don't move directly into a return register. */
3796 if (TREE_CODE (to) == RESULT_DECL
3797 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3799 rtx temp;
3801 push_temp_slots ();
3802 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3804 if (GET_CODE (to_rtx) == PARALLEL)
3805 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3806 TYPE_ALIGN (TREE_TYPE (from)));
3807 else
3808 emit_move_insn (to_rtx, temp);
3810 preserve_temp_slots (to_rtx);
3811 free_temp_slots ();
3812 pop_temp_slots ();
3813 return want_value ? to_rtx : NULL_RTX;
3816 /* In case we are returning the contents of an object which overlaps
3817 the place the value is being stored, use a safe function when copying
3818 a value through a pointer into a structure value return block. */
3819 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3820 && current_function_returns_struct
3821 && !current_function_returns_pcc_struct)
3823 rtx from_rtx, size;
3825 push_temp_slots ();
3826 size = expr_size (from);
3827 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3828 EXPAND_MEMORY_USE_DONT);
3830 /* Copy the rights of the bitmap. */
3831 if (current_function_check_memory_usage)
3832 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3833 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3834 XEXP (from_rtx, 0), Pmode,
3835 convert_to_mode (TYPE_MODE (sizetype),
3836 size, TREE_UNSIGNED (sizetype)),
3837 TYPE_MODE (sizetype));
3839 #ifdef TARGET_MEM_FUNCTIONS
3840 emit_library_call (memmove_libfunc, LCT_NORMAL,
3841 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3842 XEXP (from_rtx, 0), Pmode,
3843 convert_to_mode (TYPE_MODE (sizetype),
3844 size, TREE_UNSIGNED (sizetype)),
3845 TYPE_MODE (sizetype));
3846 #else
3847 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3848 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3849 XEXP (to_rtx, 0), Pmode,
3850 convert_to_mode (TYPE_MODE (integer_type_node),
3851 size, TREE_UNSIGNED (integer_type_node)),
3852 TYPE_MODE (integer_type_node));
3853 #endif
3855 preserve_temp_slots (to_rtx);
3856 free_temp_slots ();
3857 pop_temp_slots ();
3858 return want_value ? to_rtx : NULL_RTX;
3861 /* Compute FROM and store the value in the rtx we got. */
3863 push_temp_slots ();
3864 result = store_expr (from, to_rtx, want_value);
3865 preserve_temp_slots (result);
3866 free_temp_slots ();
3867 pop_temp_slots ();
3868 return want_value ? result : NULL_RTX;
3871 /* Generate code for computing expression EXP,
3872 and storing the value into TARGET.
3873 TARGET may contain a QUEUED rtx.
3875 If WANT_VALUE is nonzero, return a copy of the value
3876 not in TARGET, so that we can be sure to use the proper
3877 value in a containing expression even if TARGET has something
3878 else stored in it. If possible, we copy the value through a pseudo
3879 and return that pseudo. Or, if the value is constant, we try to
3880 return the constant. In some cases, we return a pseudo
3881 copied *from* TARGET.
3883 If the mode is BLKmode then we may return TARGET itself.
3884 It turns out that in BLKmode it doesn't cause a problem,
3885 because C has no operators that could combine two different
3886 assignments into the same BLKmode object with different values
3887 with no sequence point. Will other languages need this to
3888 be more thorough?
3890 If WANT_VALUE is 0, we return NULL, to make sure
3891 to catch quickly any cases where the caller uses the value
3892 and fails to set WANT_VALUE. */
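/* Illustration (hypothetical caller, not from the original source):
   WANT_VALUE matters whenever an assignment's value is itself reused, e.g.

       int x, y, z;

       void f (void)
       {
         x = y = z = 1;
       }

   The inner assignments feed the outer ones, so their expansion is asked
   for a usable value (typically a pseudo or a constant) instead of
   NULL_RTX.  */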
3895 store_expr (exp, target, want_value)
3896 register tree exp;
3897 register rtx target;
3898 int want_value;
3900 register rtx temp;
3901 int dont_return_target = 0;
3903 if (TREE_CODE (exp) == COMPOUND_EXPR)
3905 /* Perform first part of compound expression, then assign from second
3906 part. */
3907 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3908 emit_queue ();
3909 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3911 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3913 /* For conditional expression, get safe form of the target. Then
3914 test the condition, doing the appropriate assignment on either
3915 side. This avoids the creation of unnecessary temporaries.
3916 For non-BLKmode, it is more efficient not to do this. */
3918 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3920 emit_queue ();
3921 target = protect_from_queue (target, 1);
3923 do_pending_stack_adjust ();
3924 NO_DEFER_POP;
3925 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3926 start_cleanup_deferral ();
3927 store_expr (TREE_OPERAND (exp, 1), target, 0);
3928 end_cleanup_deferral ();
3929 emit_queue ();
3930 emit_jump_insn (gen_jump (lab2));
3931 emit_barrier ();
3932 emit_label (lab1);
3933 start_cleanup_deferral ();
3934 store_expr (TREE_OPERAND (exp, 2), target, 0);
3935 end_cleanup_deferral ();
3936 emit_queue ();
3937 emit_label (lab2);
3938 OK_DEFER_POP;
3940 return want_value ? target : NULL_RTX;
3942 else if (queued_subexp_p (target))
3943 /* If target contains a postincrement, let's not risk
3944 using it as the place to generate the rhs. */
3946 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3948 /* Expand EXP into a new pseudo. */
3949 temp = gen_reg_rtx (GET_MODE (target));
3950 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3952 else
3953 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3955 /* If target is volatile, ANSI requires accessing the value
3956 *from* the target, if it is accessed. So make that happen.
3957 In no case return the target itself. */
3958 if (! MEM_VOLATILE_P (target) && want_value)
3959 dont_return_target = 1;
3961 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3962 && GET_MODE (target) != BLKmode)
3963 /* If target is in memory and caller wants value in a register instead,
3964 arrange that. Pass TARGET as target for expand_expr so that,
3965 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3966 We know expand_expr will not use the target in that case.
3967 Don't do this if TARGET is volatile because we are supposed
3968 to write it and then read it. */
3970 temp = expand_expr (exp, target, GET_MODE (target), 0);
3971 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3972 temp = copy_to_reg (temp);
3973 dont_return_target = 1;
3975 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3976 /* If this is a scalar in a register that is stored in a wider mode
3977 than the declared mode, compute the result into its declared mode
3978 and then convert to the wider mode. Our value is the computed
3979 expression. */
3981 /* If we don't want a value, we can do the conversion inside EXP,
3982 which will often result in some optimizations. Do the conversion
3983 in two steps: first change the signedness, if needed, then
3984 the extend. But don't do this if the type of EXP is a subtype
3985 of something else since then the conversion might involve
3986 more than just converting modes. */
3987 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3988 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3990 if (TREE_UNSIGNED (TREE_TYPE (exp))
3991 != SUBREG_PROMOTED_UNSIGNED_P (target))
3993 = convert
3994 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3995 TREE_TYPE (exp)),
3996 exp);
3998 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3999 SUBREG_PROMOTED_UNSIGNED_P (target)),
4000 exp);
4003 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4005 /* If TEMP is a volatile MEM and we want a result value, make
4006 the access now so it gets done only once. Likewise if
4007 it contains TARGET. */
4008 if (GET_CODE (temp) == MEM && want_value
4009 && (MEM_VOLATILE_P (temp)
4010 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4011 temp = copy_to_reg (temp);
4013 /* If TEMP is a VOIDmode constant, use convert_modes to make
4014 sure that we properly convert it. */
4015 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4016 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4017 TYPE_MODE (TREE_TYPE (exp)), temp,
4018 SUBREG_PROMOTED_UNSIGNED_P (target));
4020 convert_move (SUBREG_REG (target), temp,
4021 SUBREG_PROMOTED_UNSIGNED_P (target));
4023 /* If we promoted a constant, change the mode back down to match
4024 target. Otherwise, the caller might get confused by a result whose
4025 mode is larger than expected. */
4027 if (want_value && GET_MODE (temp) != GET_MODE (target)
4028 && GET_MODE (temp) != VOIDmode)
4030 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4031 SUBREG_PROMOTED_VAR_P (temp) = 1;
4032 SUBREG_PROMOTED_UNSIGNED_P (temp)
4033 = SUBREG_PROMOTED_UNSIGNED_P (target);
4036 return want_value ? temp : NULL_RTX;
4038 else
4040 temp = expand_expr (exp, target, GET_MODE (target), 0);
4041 /* Return TARGET if it's a specified hardware register.
4042 If TARGET is a volatile mem ref, either return TARGET
4043 or return a reg copied *from* TARGET; ANSI requires this.
4045 Otherwise, if TEMP is not TARGET, return TEMP
4046 if it is constant (for efficiency),
4047 or if we really want the correct value. */
4048 if (!(target && GET_CODE (target) == REG
4049 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4050 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4051 && ! rtx_equal_p (temp, target)
4052 && (CONSTANT_P (temp) || want_value))
4053 dont_return_target = 1;
4056 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4057 the same as that of TARGET, adjust the constant. This is needed, for
4058 example, in case it is a CONST_DOUBLE and we want only a word-sized
4059 value. */
4060 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4061 && TREE_CODE (exp) != ERROR_MARK
4062 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4063 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4064 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4066 if (current_function_check_memory_usage
4067 && GET_CODE (target) == MEM
4068 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4070 in_check_memory_usage = 1;
4071 if (GET_CODE (temp) == MEM)
4072 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4073 VOIDmode, 3, XEXP (target, 0), Pmode,
4074 XEXP (temp, 0), Pmode,
4075 expr_size (exp), TYPE_MODE (sizetype));
4076 else
4077 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4078 VOIDmode, 3, XEXP (target, 0), Pmode,
4079 expr_size (exp), TYPE_MODE (sizetype),
4080 GEN_INT (MEMORY_USE_WO),
4081 TYPE_MODE (integer_type_node));
4082 in_check_memory_usage = 0;
4085 /* If value was not generated in the target, store it there.
4086 Convert the value to TARGET's type first if necessary. */
4087 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4088 one or both of them are volatile memory refs, we have to distinguish
4089 two cases:
4090 - expand_expr has used TARGET. In this case, we must not generate
4091 another copy. This can be detected by TARGET being equal according
4092 to == .
4093 - expand_expr has not used TARGET - that means that the source just
4094 happens to have the same RTX form. Since temp will have been created
4095 by expand_expr, it will compare unequal according to == .
4096 We must generate a copy in this case, to reach the correct number
4097 of volatile memory references. */
4099 if ((! rtx_equal_p (temp, target)
4100 || (temp != target && (side_effects_p (temp)
4101 || side_effects_p (target))))
4102 && TREE_CODE (exp) != ERROR_MARK)
4104 target = protect_from_queue (target, 1);
4105 if (GET_MODE (temp) != GET_MODE (target)
4106 && GET_MODE (temp) != VOIDmode)
4108 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4109 if (dont_return_target)
4111 /* In this case, we will return TEMP,
4112 so make sure it has the proper mode.
4113 But don't forget to store the value into TARGET. */
4114 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4115 emit_move_insn (target, temp);
4117 else
4118 convert_move (target, temp, unsignedp);
4121 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4123 /* Handle copying a string constant into an array.
4124 The string constant may be shorter than the array.
4125 So copy just the string's actual length, and clear the rest. */
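/* Illustrative example (added; hypothetical user code):

       char buf[8] = "hi";

   The STRING_CST holds 3 bytes ('h', 'i' and the terminating NUL), so only
   those 3 bytes are block-copied and the remaining 5 bytes of BUF are
   cleared by the code below.  */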
4126 rtx size;
4127 rtx addr;
4129 /* Get the size of the data type of the string,
4130 which is actually the size of the target. */
4131 size = expr_size (exp);
4132 if (GET_CODE (size) == CONST_INT
4133 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4134 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4135 else
4137 /* Compute the size of the data to copy from the string. */
4138 tree copy_size
4139 = size_binop (MIN_EXPR,
4140 make_tree (sizetype, size),
4141 size_int (TREE_STRING_LENGTH (exp)));
4142 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4143 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4144 VOIDmode, 0);
4145 rtx label = 0;
4147 /* Copy that much. */
4148 emit_block_move (target, temp, copy_size_rtx,
4149 TYPE_ALIGN (TREE_TYPE (exp)));
4151 /* Figure out how much is left in TARGET that we have to clear.
4152 Do all calculations in ptr_mode. */
4154 addr = XEXP (target, 0);
4155 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4157 if (GET_CODE (copy_size_rtx) == CONST_INT)
4159 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4160 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4161 align = MIN (align,
4162 (unsigned int) (BITS_PER_UNIT
4163 * (INTVAL (copy_size_rtx)
4164 & - INTVAL (copy_size_rtx))));
4166 else
4168 addr = force_reg (ptr_mode, addr);
4169 addr = expand_binop (ptr_mode, add_optab, addr,
4170 copy_size_rtx, NULL_RTX, 0,
4171 OPTAB_LIB_WIDEN);
4173 size = expand_binop (ptr_mode, sub_optab, size,
4174 copy_size_rtx, NULL_RTX, 0,
4175 OPTAB_LIB_WIDEN);
4177 align = BITS_PER_UNIT;
4178 label = gen_label_rtx ();
4179 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4180 GET_MODE (size), 0, 0, label);
4182 align = MIN (align, expr_align (copy_size));
4184 if (size != const0_rtx)
4186 rtx dest = gen_rtx_MEM (BLKmode, addr);
4188 MEM_COPY_ATTRIBUTES (dest, target);
4190 /* Be sure we can write on ADDR. */
4191 in_check_memory_usage = 1;
4192 if (current_function_check_memory_usage)
4193 emit_library_call (chkr_check_addr_libfunc,
4194 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4195 addr, Pmode,
4196 size, TYPE_MODE (sizetype),
4197 GEN_INT (MEMORY_USE_WO),
4198 TYPE_MODE (integer_type_node));
4199 in_check_memory_usage = 0;
4200 clear_storage (dest, size, align);
4203 if (label)
4204 emit_label (label);
4207 /* Handle calls that return values in multiple non-contiguous locations.
4208 The Irix 6 ABI has examples of this. */
4209 else if (GET_CODE (target) == PARALLEL)
4210 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4211 TYPE_ALIGN (TREE_TYPE (exp)));
4212 else if (GET_MODE (temp) == BLKmode)
4213 emit_block_move (target, temp, expr_size (exp),
4214 TYPE_ALIGN (TREE_TYPE (exp)));
4215 else
4216 emit_move_insn (target, temp);
4219 /* If we don't want a value, return NULL_RTX. */
4220 if (! want_value)
4221 return NULL_RTX;
4223 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4224 ??? The latter test doesn't seem to make sense. */
4225 else if (dont_return_target && GET_CODE (temp) != MEM)
4226 return temp;
4228 /* Return TARGET itself if it is a hard register. */
4229 else if (want_value && GET_MODE (target) != BLKmode
4230 && ! (GET_CODE (target) == REG
4231 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4232 return copy_to_reg (target);
4234 else
4235 return target;
4238 /* Return 1 if EXP just contains zeros. */
4240 static int
4241 is_zeros_p (exp)
4242 tree exp;
4244 tree elt;
4246 switch (TREE_CODE (exp))
4248 case CONVERT_EXPR:
4249 case NOP_EXPR:
4250 case NON_LVALUE_EXPR:
4251 return is_zeros_p (TREE_OPERAND (exp, 0));
4253 case INTEGER_CST:
4254 return integer_zerop (exp);
4256 case COMPLEX_CST:
4257 return
4258 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4260 case REAL_CST:
4261 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4263 case CONSTRUCTOR:
4264 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4265 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4266 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4267 if (! is_zeros_p (TREE_VALUE (elt)))
4268 return 0;
4270 return 1;
4272 default:
4273 return 0;
4277 /* Return 1 if EXP contains mostly (3/4) zeros. */
4279 static int
4280 mostly_zeros_p (exp)
4281 tree exp;
4283 if (TREE_CODE (exp) == CONSTRUCTOR)
4285 int elts = 0, zeros = 0;
4286 tree elt = CONSTRUCTOR_ELTS (exp);
4287 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4289 /* If there are no ranges of true bits, it is all zero. */
4290 return elt == NULL_TREE;
4292 for (; elt; elt = TREE_CHAIN (elt))
4294 /* We do not handle the case where the index is a RANGE_EXPR,
4295 so the statistic will be somewhat inaccurate.
4296 We do make a more accurate count in store_constructor itself,
4297 so since this function is only used for nested array elements,
4298 this should be close enough. */
4299 if (mostly_zeros_p (TREE_VALUE (elt)))
4300 zeros++;
4301 elts++;
4304 return 4 * zeros >= 3 * elts;
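/* Note added for illustration: "4 * zeros >= 3 * elts" is the integer-only
   form of "zeros / elts >= 3 / 4".  For example, 12 zero elements out of 16
   gives 48 >= 48 and counts as mostly zero, while 11 out of 16 gives
   44 >= 48 and does not.  */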
4307 return is_zeros_p (exp);
4310 /* Helper function for store_constructor.
4311 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4312 TYPE is the type of the CONSTRUCTOR, not the element type.
4313 ALIGN and CLEARED are as for store_constructor.
4314 ALIAS_SET is the alias set to use for any stores.
4316 This provides a recursive shortcut back to store_constructor when it isn't
4317 necessary to go through store_field. This is so that we can pass through
4318 the cleared field to let store_constructor know that we may not have to
4319 clear a substructure if the outer structure has already been cleared. */
4321 static void
4322 store_constructor_field (target, bitsize, bitpos,
4323 mode, exp, type, align, cleared, alias_set)
4324 rtx target;
4325 unsigned HOST_WIDE_INT bitsize;
4326 HOST_WIDE_INT bitpos;
4327 enum machine_mode mode;
4328 tree exp, type;
4329 unsigned int align;
4330 int cleared;
4331 int alias_set;
4333 if (TREE_CODE (exp) == CONSTRUCTOR
4334 && bitpos % BITS_PER_UNIT == 0
4335 /* If we have a non-zero bitpos for a register target, then we just
4336 let store_field do the bitfield handling. This is unlikely to
4337 generate unnecessary clear instructions anyway. */
4338 && (bitpos == 0 || GET_CODE (target) == MEM))
4340 if (bitpos != 0)
4341 target
4342 = change_address (target,
4343 GET_MODE (target) == BLKmode
4344 || 0 != (bitpos
4345 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4346 ? BLKmode : VOIDmode,
4347 plus_constant (XEXP (target, 0),
4348 bitpos / BITS_PER_UNIT));
4351 /* Show the alignment may no longer be what it was and update the alias
4352 set, if required. */
4353 if (bitpos != 0)
4354 align = MIN (align, (unsigned int) bitpos & - bitpos);
4355 if (GET_CODE (target) == MEM)
4356 MEM_ALIAS_SET (target) = alias_set;
4358 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4360 else
4361 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4362 int_size_in_bytes (type), alias_set);
4365 /* Store the value of constructor EXP into the rtx TARGET.
4366 TARGET is either a REG or a MEM.
4367 ALIGN is the maximum known alignment for TARGET.
4368 CLEARED is true if TARGET is known to have been zero'd.
4369 SIZE is the number of bytes of TARGET we are allowed to modify: this
4370 may not be the same as the size of EXP if we are assigning to a field
4371 which has been packed to exclude padding bits. */
4373 static void
4374 store_constructor (exp, target, align, cleared, size)
4375 tree exp;
4376 rtx target;
4377 unsigned int align;
4378 int cleared;
4379 HOST_WIDE_INT size;
4381 tree type = TREE_TYPE (exp);
4382 #ifdef WORD_REGISTER_OPERATIONS
4383 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4384 #endif
4386 /* We know our target cannot conflict, since safe_from_p has been called. */
4387 #if 0
4388 /* Don't try copying piece by piece into a hard register
4389 since that is vulnerable to being clobbered by EXP.
4390 Instead, construct in a pseudo register and then copy it all. */
4391 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4393 rtx temp = gen_reg_rtx (GET_MODE (target));
4394 store_constructor (exp, temp, align, cleared, size);
4395 emit_move_insn (target, temp);
4396 return;
4398 #endif
4400 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4401 || TREE_CODE (type) == QUAL_UNION_TYPE)
4403 register tree elt;
4405 /* Inform later passes that the whole union value is dead. */
4406 if ((TREE_CODE (type) == UNION_TYPE
4407 || TREE_CODE (type) == QUAL_UNION_TYPE)
4408 && ! cleared)
4410 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4412 /* If the constructor is empty, clear the union. */
4413 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4414 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4417 /* If we are building a static constructor into a register,
4418 set the initial value as zero so we can fold the value into
4419 a constant. But if more than one register is involved,
4420 this probably loses. */
4421 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4422 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4424 if (! cleared)
4425 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4427 cleared = 1;
4430 /* If the constructor has fewer fields than the structure
4431 or if we are initializing the structure to mostly zeros,
4432 clear the whole structure first. Don't do this if TARGET is a
4433 register whose mode size isn't equal to SIZE since clear_storage
4434 can't handle this case. */
4435 else if (size > 0
4436 && ((list_length (CONSTRUCTOR_ELTS (exp))
4437 != fields_length (type))
4438 || mostly_zeros_p (exp))
4439 && (GET_CODE (target) != REG
4440 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4442 if (! cleared)
4443 clear_storage (target, GEN_INT (size), align);
4445 cleared = 1;
4447 else if (! cleared)
4448 /* Inform later passes that the old value is dead. */
4449 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4451 /* Store each element of the constructor into
4452 the corresponding field of TARGET. */
4454 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4456 register tree field = TREE_PURPOSE (elt);
4457 #ifdef WORD_REGISTER_OPERATIONS
4458 tree value = TREE_VALUE (elt);
4459 #endif
4460 register enum machine_mode mode;
4461 HOST_WIDE_INT bitsize;
4462 HOST_WIDE_INT bitpos = 0;
4463 int unsignedp;
4464 tree offset;
4465 rtx to_rtx = target;
4467 /* Just ignore missing fields.
4468 We cleared the whole structure, above,
4469 if any fields are missing. */
4470 if (field == 0)
4471 continue;
4473 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4474 continue;
4476 if (host_integerp (DECL_SIZE (field), 1))
4477 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4478 else
4479 bitsize = -1;
4481 unsignedp = TREE_UNSIGNED (field);
4482 mode = DECL_MODE (field);
4483 if (DECL_BIT_FIELD (field))
4484 mode = VOIDmode;
4486 offset = DECL_FIELD_OFFSET (field);
4487 if (host_integerp (offset, 0)
4488 && host_integerp (bit_position (field), 0))
4490 bitpos = int_bit_position (field);
4491 offset = 0;
4493 else
4494 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4496 if (offset)
4498 rtx offset_rtx;
4500 if (contains_placeholder_p (offset))
4501 offset = build (WITH_RECORD_EXPR, sizetype,
4502 offset, make_tree (TREE_TYPE (exp), target));
4504 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4505 if (GET_CODE (to_rtx) != MEM)
4506 abort ();
4508 if (GET_MODE (offset_rtx) != ptr_mode)
4510 #ifdef POINTERS_EXTEND_UNSIGNED
4511 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4512 #else
4513 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4514 #endif
4517 to_rtx
4518 = change_address (to_rtx, VOIDmode,
4519 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4520 force_reg (ptr_mode,
4521 offset_rtx)));
4522 align = DECL_OFFSET_ALIGN (field);
4525 if (TREE_READONLY (field))
4527 if (GET_CODE (to_rtx) == MEM)
4528 to_rtx = copy_rtx (to_rtx);
4530 RTX_UNCHANGING_P (to_rtx) = 1;
4533 #ifdef WORD_REGISTER_OPERATIONS
4534 /* If this initializes a field that is smaller than a word, at the
4535 start of a word, try to widen it to a full word.
4536 This special case allows us to output C++ member function
4537 initializations in a form that the optimizers can understand. */
4538 if (GET_CODE (target) == REG
4539 && bitsize < BITS_PER_WORD
4540 && bitpos % BITS_PER_WORD == 0
4541 && GET_MODE_CLASS (mode) == MODE_INT
4542 && TREE_CODE (value) == INTEGER_CST
4543 && exp_size >= 0
4544 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4546 tree type = TREE_TYPE (value);
4547 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4549 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4550 value = convert (type, value);
4552 if (BYTES_BIG_ENDIAN)
4553 value
4554 = fold (build (LSHIFT_EXPR, type, value,
4555 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4556 bitsize = BITS_PER_WORD;
4557 mode = word_mode;
4559 #endif
4560 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4561 TREE_VALUE (elt), type, align, cleared,
4562 (DECL_NONADDRESSABLE_P (field)
4563 && GET_CODE (to_rtx) == MEM)
4564 ? MEM_ALIAS_SET (to_rtx)
4565 : get_alias_set (TREE_TYPE (field)));
4568 else if (TREE_CODE (type) == ARRAY_TYPE)
4570 register tree elt;
4571 register int i;
4572 int need_to_clear;
4573 tree domain = TYPE_DOMAIN (type);
4574 tree elttype = TREE_TYPE (type);
4575 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4576 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4577 HOST_WIDE_INT minelt;
4578 HOST_WIDE_INT maxelt;
4580 /* If we have constant bounds for the range of the type, get them. */
4581 if (const_bounds_p)
4583 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4584 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4587 /* If the constructor has fewer elements than the array,
4588 clear the whole array first. Similarly if this is
4589 a static constructor of a non-BLKmode object. */
4590 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4591 need_to_clear = 1;
4592 else
4594 HOST_WIDE_INT count = 0, zero_count = 0;
4595 need_to_clear = ! const_bounds_p;
4597 /* This loop is a more accurate version of the loop in
4598 mostly_zeros_p (it handles RANGE_EXPR in an index).
4599 It is also needed to check for missing elements. */
4600 for (elt = CONSTRUCTOR_ELTS (exp);
4601 elt != NULL_TREE && ! need_to_clear;
4602 elt = TREE_CHAIN (elt))
4604 tree index = TREE_PURPOSE (elt);
4605 HOST_WIDE_INT this_node_count;
4607 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4609 tree lo_index = TREE_OPERAND (index, 0);
4610 tree hi_index = TREE_OPERAND (index, 1);
4612 if (! host_integerp (lo_index, 1)
4613 || ! host_integerp (hi_index, 1))
4615 need_to_clear = 1;
4616 break;
4619 this_node_count = (tree_low_cst (hi_index, 1)
4620 - tree_low_cst (lo_index, 1) + 1);
4622 else
4623 this_node_count = 1;
4625 count += this_node_count;
4626 if (mostly_zeros_p (TREE_VALUE (elt)))
4627 zero_count += this_node_count;
4630 /* Clear the entire array first if there are any missing elements,
4631 or if the incidence of zero elements is >= 75%. */
4632 if (! need_to_clear
4633 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4634 need_to_clear = 1;
4637 if (need_to_clear && size > 0)
4639 if (! cleared)
4640 clear_storage (target, GEN_INT (size), align);
4641 cleared = 1;
4643 else
4644 /* Inform later passes that the old value is dead. */
4645 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4647 /* Store each element of the constructor into
4648 the corresponding element of TARGET, determined
4649 by counting the elements. */
4650 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4651 elt;
4652 elt = TREE_CHAIN (elt), i++)
4654 register enum machine_mode mode;
4655 HOST_WIDE_INT bitsize;
4656 HOST_WIDE_INT bitpos;
4657 int unsignedp;
4658 tree value = TREE_VALUE (elt);
4659 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4660 tree index = TREE_PURPOSE (elt);
4661 rtx xtarget = target;
4663 if (cleared && is_zeros_p (value))
4664 continue;
4666 unsignedp = TREE_UNSIGNED (elttype);
4667 mode = TYPE_MODE (elttype);
4668 if (mode == BLKmode)
4669 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4670 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4671 : -1);
4672 else
4673 bitsize = GET_MODE_BITSIZE (mode);
4675 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4677 tree lo_index = TREE_OPERAND (index, 0);
4678 tree hi_index = TREE_OPERAND (index, 1);
4679 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4680 struct nesting *loop;
4681 HOST_WIDE_INT lo, hi, count;
4682 tree position;
4684 /* If the range is constant and "small", unroll the loop. */
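/* Illustration (GNU C source, not part of this file): a RANGE_EXPR index
   typically comes from a designated range initializer such as

       int a[8] = { [2 ... 5] = 7 };

   When the bounds are constant and the data is small (the 40 * 8 bit
   threshold below), the range is emitted as individual stores instead of a
   run-time loop.  */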
4685 if (const_bounds_p
4686 && host_integerp (lo_index, 0)
4687 && host_integerp (hi_index, 0)
4688 && (lo = tree_low_cst (lo_index, 0),
4689 hi = tree_low_cst (hi_index, 0),
4690 count = hi - lo + 1,
4691 (GET_CODE (target) != MEM
4692 || count <= 2
4693 || (host_integerp (TYPE_SIZE (elttype), 1)
4694 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4695 <= 40 * 8)))))
4697 lo -= minelt; hi -= minelt;
4698 for (; lo <= hi; lo++)
4700 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4701 store_constructor_field
4702 (target, bitsize, bitpos, mode, value, type, align,
4703 cleared,
4704 TYPE_NONALIASED_COMPONENT (type)
4705 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4708 else
4710 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4711 loop_top = gen_label_rtx ();
4712 loop_end = gen_label_rtx ();
4714 unsignedp = TREE_UNSIGNED (domain);
4716 index = build_decl (VAR_DECL, NULL_TREE, domain);
4718 index_r
4719 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4720 &unsignedp, 0));
4721 SET_DECL_RTL (index, index_r);
4722 if (TREE_CODE (value) == SAVE_EXPR
4723 && SAVE_EXPR_RTL (value) == 0)
4725 /* Make sure value gets expanded once before the
4726 loop. */
4727 expand_expr (value, const0_rtx, VOIDmode, 0);
4728 emit_queue ();
4730 store_expr (lo_index, index_r, 0);
4731 loop = expand_start_loop (0);
4733 /* Assign value to element index. */
4734 position
4735 = convert (ssizetype,
4736 fold (build (MINUS_EXPR, TREE_TYPE (index),
4737 index, TYPE_MIN_VALUE (domain))));
4738 position = size_binop (MULT_EXPR, position,
4739 convert (ssizetype,
4740 TYPE_SIZE_UNIT (elttype)));
4742 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4743 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4744 xtarget = change_address (target, mode, addr);
4745 if (TREE_CODE (value) == CONSTRUCTOR)
4746 store_constructor (value, xtarget, align, cleared,
4747 bitsize / BITS_PER_UNIT);
4748 else
4749 store_expr (value, xtarget, 0);
4751 expand_exit_loop_if_false (loop,
4752 build (LT_EXPR, integer_type_node,
4753 index, hi_index));
4755 expand_increment (build (PREINCREMENT_EXPR,
4756 TREE_TYPE (index),
4757 index, integer_one_node), 0, 0);
4758 expand_end_loop ();
4759 emit_label (loop_end);
4762 else if ((index != 0 && ! host_integerp (index, 0))
4763 || ! host_integerp (TYPE_SIZE (elttype), 1))
4765 rtx pos_rtx, addr;
4766 tree position;
4768 if (index == 0)
4769 index = ssize_int (i);
4771 if (minelt)
4772 index = convert (ssizetype,
4773 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4774 TYPE_MIN_VALUE (domain))));
4776 position = size_binop (MULT_EXPR, index,
4777 convert (ssizetype,
4778 TYPE_SIZE_UNIT (elttype)));
4779 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4780 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4781 xtarget = change_address (target, mode, addr);
4782 store_expr (value, xtarget, 0);
4784 else
4786 if (index != 0)
4787 bitpos = ((tree_low_cst (index, 0) - minelt)
4788 * tree_low_cst (TYPE_SIZE (elttype), 1));
4789 else
4790 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4792 store_constructor_field (target, bitsize, bitpos, mode, value,
4793 type, align, cleared,
4794 TYPE_NONALIASED_COMPONENT (type)
4795 && GET_CODE (target) == MEM
4796 ? MEM_ALIAS_SET (target) :
4797 get_alias_set (elttype));
4803 /* Set constructor assignments. */
4804 else if (TREE_CODE (type) == SET_TYPE)
4806 tree elt = CONSTRUCTOR_ELTS (exp);
4807 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4808 tree domain = TYPE_DOMAIN (type);
4809 tree domain_min, domain_max, bitlength;
4811 /* The default implementation strategy is to extract the constant
4812 parts of the constructor, use that to initialize the target,
4813 and then "or" in whatever non-constant ranges we need in addition.
4815 If a large set is all zero or all ones, it is
4816 probably better to set it using memset (if available) or bzero.
4817 Also, if a large set has just a single range, it may also be
4818 better to first clear the whole set (using
4819 bzero/memset), and then set the bits we want. */
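/* Worked illustration (added for exposition): for a set over the domain
   0..7 whose constant constructor denotes { 1, 3, 5 }, the word-building
   loop below produces, with little-endian bit numbering,

       bit:    7 6 5 4 3 2 1 0
       value:  0 0 1 0 1 0 1 0        i.e. the byte 0x2a

   which is stored directly; any non-constant ranges are then set afterwards
   via the __setbits library call.  */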
4821 /* Check for all zeros. */
4822 if (elt == NULL_TREE && size > 0)
4824 if (!cleared)
4825 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4826 return;
4829 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4830 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4831 bitlength = size_binop (PLUS_EXPR,
4832 size_diffop (domain_max, domain_min),
4833 ssize_int (1));
4835 nbits = tree_low_cst (bitlength, 1);
4837 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4838 are "complicated" (more than one range), initialize (the
4839 constant parts) by copying from a constant. */
4840 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4841 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4843 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4844 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4845 char *bit_buffer = (char *) alloca (nbits);
4846 HOST_WIDE_INT word = 0;
4847 unsigned int bit_pos = 0;
4848 unsigned int ibit = 0;
4849 unsigned int offset = 0; /* In bytes from beginning of set. */
4851 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4852 for (;;)
4854 if (bit_buffer[ibit])
4856 if (BYTES_BIG_ENDIAN)
4857 word |= (1 << (set_word_size - 1 - bit_pos));
4858 else
4859 word |= 1 << bit_pos;
4862 bit_pos++; ibit++;
4863 if (bit_pos >= set_word_size || ibit == nbits)
4865 if (word != 0 || ! cleared)
4867 rtx datum = GEN_INT (word);
4868 rtx to_rtx;
4870 /* The assumption here is that it is safe to use
4871 XEXP if the set is multi-word, but not if
4872 it's single-word. */
4873 if (GET_CODE (target) == MEM)
4875 to_rtx = plus_constant (XEXP (target, 0), offset);
4876 to_rtx = change_address (target, mode, to_rtx);
4878 else if (offset == 0)
4879 to_rtx = target;
4880 else
4881 abort ();
4882 emit_move_insn (to_rtx, datum);
4885 if (ibit == nbits)
4886 break;
4887 word = 0;
4888 bit_pos = 0;
4889 offset += set_word_size / BITS_PER_UNIT;
4893 else if (!cleared)
4894 /* Don't bother clearing storage if the set is all ones. */
4895 if (TREE_CHAIN (elt) != NULL_TREE
4896 || (TREE_PURPOSE (elt) == NULL_TREE
4897 ? nbits != 1
4898 : ( ! host_integerp (TREE_VALUE (elt), 0)
4899 || ! host_integerp (TREE_PURPOSE (elt), 0)
4900 || (tree_low_cst (TREE_VALUE (elt), 0)
4901 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4902 != (HOST_WIDE_INT) nbits))))
4903 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4905 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4907 /* Start of range of element or NULL. */
4908 tree startbit = TREE_PURPOSE (elt);
4909 /* End of range of element, or element value. */
4910 tree endbit = TREE_VALUE (elt);
4911 #ifdef TARGET_MEM_FUNCTIONS
4912 HOST_WIDE_INT startb, endb;
4913 #endif
4914 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4916 bitlength_rtx = expand_expr (bitlength,
4917 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4919 /* Handle non-range tuple element like [ expr ]. */
4920 if (startbit == NULL_TREE)
4922 startbit = save_expr (endbit);
4923 endbit = startbit;
4926 startbit = convert (sizetype, startbit);
4927 endbit = convert (sizetype, endbit);
4928 if (! integer_zerop (domain_min))
4930 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4931 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4933 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4934 EXPAND_CONST_ADDRESS);
4935 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4936 EXPAND_CONST_ADDRESS);
4938 if (REG_P (target))
4940 targetx
4941 = assign_temp
4942 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4943 TYPE_QUAL_CONST)),
4944 0, 1, 1);
4945 emit_move_insn (targetx, target);
4948 else if (GET_CODE (target) == MEM)
4949 targetx = target;
4950 else
4951 abort ();
4953 #ifdef TARGET_MEM_FUNCTIONS
4954 /* Optimization: If startbit and endbit are
4955 constants divisible by BITS_PER_UNIT,
4956 call memset instead. */
4957 if (TREE_CODE (startbit) == INTEGER_CST
4958 && TREE_CODE (endbit) == INTEGER_CST
4959 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4960 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4962 emit_library_call (memset_libfunc, LCT_NORMAL,
4963 VOIDmode, 3,
4964 plus_constant (XEXP (targetx, 0),
4965 startb / BITS_PER_UNIT),
4966 Pmode,
4967 constm1_rtx, TYPE_MODE (integer_type_node),
4968 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4969 TYPE_MODE (sizetype));
4971 else
4972 #endif
4973 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4974 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4975 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4976 startbit_rtx, TYPE_MODE (sizetype),
4977 endbit_rtx, TYPE_MODE (sizetype));
4979 if (REG_P (target))
4980 emit_move_insn (target, targetx);
4984 else
4985 abort ();
4988 /* Store the value of EXP (an expression tree)
4989 into a subfield of TARGET which has mode MODE and occupies
4990 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4991 If MODE is VOIDmode, it means that we are storing into a bit-field.
4993 If VALUE_MODE is VOIDmode, return nothing in particular.
4994 UNSIGNEDP is not used in this case.
4996 Otherwise, return an rtx for the value stored. This rtx
4997 has mode VALUE_MODE if that is convenient to do.
4998 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5000 ALIGN is the alignment that TARGET is known to have.
5001 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5003 ALIAS_SET is the alias set for the destination. This value will
5004 (in general) be different from that for TARGET, since TARGET is a
5005 reference to the containing structure. */
5007 static rtx
5008 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5009 unsignedp, align, total_size, alias_set)
5010 rtx target;
5011 HOST_WIDE_INT bitsize;
5012 HOST_WIDE_INT bitpos;
5013 enum machine_mode mode;
5014 tree exp;
5015 enum machine_mode value_mode;
5016 int unsignedp;
5017 unsigned int align;
5018 HOST_WIDE_INT total_size;
5019 int alias_set;
5021 HOST_WIDE_INT width_mask = 0;
5023 if (TREE_CODE (exp) == ERROR_MARK)
5024 return const0_rtx;
5026 /* If we have nothing to store, do nothing unless the expression has
5027 side-effects. */
5028 if (bitsize == 0)
5029 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5031 if (bitsize < HOST_BITS_PER_WIDE_INT)
5032 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
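/* Example (added): with bitsize == 5, WIDTH_MASK becomes
   ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, a mask covering exactly the five
   bits of the field; it is used further down to rebuild the stored value
   without refetching it from the bit-field.  */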
5034 /* If we are storing into an unaligned field of an aligned union that is
5035 in a register, we may have the mode of TARGET being an integer mode but
5036 MODE == BLKmode. In that case, get an aligned object whose size and
5037 alignment are the same as TARGET and store TARGET into it (we can avoid
5038 the store if the field being stored is the entire width of TARGET). Then
5039 call ourselves recursively to store the field into a BLKmode version of
5040 that object. Finally, load from the object into TARGET. This is not
5041 very efficient in general, but should only be slightly more expensive
5042 than the otherwise-required unaligned accesses. Perhaps this can be
5043 cleaned up later. */
5045 if (mode == BLKmode
5046 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5048 rtx object
5049 = assign_temp
5050 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5051 TYPE_QUAL_CONST),
5052 0, 1, 1);
5053 rtx blk_object = copy_rtx (object);
5055 PUT_MODE (blk_object, BLKmode);
5057 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5058 emit_move_insn (object, target);
5060 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5061 align, total_size, alias_set);
5063 /* Even though we aren't returning target, we need to
5064 give it the updated value. */
5065 emit_move_insn (target, object);
5067 return blk_object;
5070 if (GET_CODE (target) == CONCAT)
5072 /* We're storing into a struct containing a single __complex. */
5074 if (bitpos != 0)
5075 abort ();
5076 return store_expr (exp, target, 0);
5079 /* If the structure is in a register or if the component
5080 is a bit field, we cannot use addressing to access it.
5081 Use bit-field techniques or SUBREG to store in it. */
5083 if (mode == VOIDmode
5084 || (mode != BLKmode && ! direct_store[(int) mode]
5085 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5086 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5087 || GET_CODE (target) == REG
5088 || GET_CODE (target) == SUBREG
5089 /* If the field isn't aligned enough to store as an ordinary memref,
5090 store it as a bit field. */
5091 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5092 && (align < GET_MODE_ALIGNMENT (mode)
5093 || bitpos % GET_MODE_ALIGNMENT (mode)))
5094 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5095 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5096 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5097 /* If the RHS and field are a constant size and the size of the
5098 RHS isn't the same size as the bitfield, we must use bitfield
5099 operations. */
5100 || (bitsize >= 0
5101 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5102 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5104 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5106 /* If BITSIZE is narrower than the size of the type of EXP
5107 we will be narrowing TEMP. Normally, what's wanted are the
5108 low-order bits. However, if EXP's type is a record and this is
5109 a big-endian machine, we want the upper BITSIZE bits. */
5110 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5111 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5112 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5113 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5114 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5115 - bitsize),
5116 temp, 1);
5118 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5119 MODE. */
5120 if (mode != VOIDmode && mode != BLKmode
5121 && mode != TYPE_MODE (TREE_TYPE (exp)))
5122 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5124 /* If the modes of TARGET and TEMP are both BLKmode, both
5125 must be in memory and BITPOS must be aligned on a byte
5126 boundary. If so, we simply do a block copy. */
5127 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5129 unsigned int exp_align = expr_align (exp);
5131 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5132 || bitpos % BITS_PER_UNIT != 0)
5133 abort ();
5135 target = change_address (target, VOIDmode,
5136 plus_constant (XEXP (target, 0),
5137 bitpos / BITS_PER_UNIT));
5139 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5140 align = MIN (exp_align, align);
5142 /* Find an alignment that is consistent with the bit position. */
5143 while ((bitpos % align) != 0)
5144 align >>= 1;
5146 emit_block_move (target, temp,
5147 bitsize == -1 ? expr_size (exp)
5148 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5149 / BITS_PER_UNIT),
5150 align);
5152 return value_mode == VOIDmode ? const0_rtx : target;
5155 /* Store the value in the bitfield. */
5156 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5157 if (value_mode != VOIDmode)
5159 /* The caller wants an rtx for the value. */
5160 /* If possible, avoid refetching from the bitfield itself. */
5161 if (width_mask != 0
5162 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5164 tree count;
5165 enum machine_mode tmode;
5167 if (unsignedp)
5168 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5169 tmode = GET_MODE (temp);
5170 if (tmode == VOIDmode)
5171 tmode = value_mode;
5172 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5173 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5174 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
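/* Illustration of the shift pair above (added): for a signed field with
   bitsize == 5 held in a 32-bit TMODE, COUNT is 27; shifting left by 27 and
   then arithmetically right by 27 reproduces the sign-extended field value
   without reading the bit-field back.  The 5-bit pattern 0b10110, for
   instance, comes out as -10.  */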
5176 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5177 NULL_RTX, value_mode, 0, align,
5178 total_size);
5180 return const0_rtx;
5182 else
5184 rtx addr = XEXP (target, 0);
5185 rtx to_rtx;
5187 /* If a value is wanted, it must be the lhs;
5188 so make the address stable for multiple use. */
5190 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5191 && ! CONSTANT_ADDRESS_P (addr)
5192 /* A frame-pointer reference is already stable. */
5193 && ! (GET_CODE (addr) == PLUS
5194 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5195 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5196 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5197 addr = copy_to_reg (addr);
5199 /* Now build a reference to just the desired component. */
5201 to_rtx = copy_rtx (change_address (target, mode,
5202 plus_constant (addr,
5203 (bitpos
5204 / BITS_PER_UNIT))));
5205 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5206 MEM_ALIAS_SET (to_rtx) = alias_set;
5208 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5212 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5213 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5214 ARRAY_REFs and find the ultimate containing object, which we return.
5216 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5217 bit position, and *PUNSIGNEDP to the signedness of the field.
5218 If the position of the field is variable, we store a tree
5219 giving the variable offset (in units) in *POFFSET.
5220 This offset is in addition to the bit position.
5221 If the position is not variable, we store 0 in *POFFSET.
5222 We set *PALIGNMENT to the alignment of the address that will be
5223 computed. This is the alignment of the thing we return if *POFFSET
5224 is zero, but can be more or less strictly aligned if *POFFSET is nonzero.
5226 If any of the extraction expressions is volatile,
5227 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5229 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5230 is a mode that can be used to access the field. In that case, *PBITSIZE
5231 is redundant.
5233 If the field describes a variable-sized object, *PMODE is set to
5234 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5235 this case, but the address of the object can be found. */
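/* Usage illustration (hypothetical declarations, not part of this file):
   for a reference such as

       struct S { int pad; short f[10]; };
       struct S s;
       ... s.f[3] ...

   get_inner_reference returns the VAR_DECL for the variable s, and on a
   typical 32-bit target (4-byte int, 2-byte short, no extra padding) sets
   *PBITSIZE to 16, *PBITPOS to 32 + 3 * 16 == 80, *POFFSET to 0 and
   *PMODE to HImode.  */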
5237 tree
5238 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5239 punsignedp, pvolatilep, palignment)
5240 tree exp;
5241 HOST_WIDE_INT *pbitsize;
5242 HOST_WIDE_INT *pbitpos;
5243 tree *poffset;
5244 enum machine_mode *pmode;
5245 int *punsignedp;
5246 int *pvolatilep;
5247 unsigned int *palignment;
5249 tree size_tree = 0;
5250 enum machine_mode mode = VOIDmode;
5251 tree offset = size_zero_node;
5252 tree bit_offset = bitsize_zero_node;
5253 unsigned int alignment = BIGGEST_ALIGNMENT;
5254 tree tem;
5256 /* First get the mode, signedness, and size. We do this from just the
5257 outermost expression. */
5258 if (TREE_CODE (exp) == COMPONENT_REF)
5260 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5261 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5262 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5264 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5266 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5268 size_tree = TREE_OPERAND (exp, 1);
5269 *punsignedp = TREE_UNSIGNED (exp);
5271 else
5273 mode = TYPE_MODE (TREE_TYPE (exp));
5274 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5276 if (mode == BLKmode)
5277 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5278 else
5279 *pbitsize = GET_MODE_BITSIZE (mode);
5282 if (size_tree != 0)
5284 if (! host_integerp (size_tree, 1))
5285 mode = BLKmode, *pbitsize = -1;
5286 else
5287 *pbitsize = tree_low_cst (size_tree, 1);
5290 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5291 and find the ultimate containing object. */
5292 while (1)
5294 if (TREE_CODE (exp) == BIT_FIELD_REF)
5295 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5296 else if (TREE_CODE (exp) == COMPONENT_REF)
5298 tree field = TREE_OPERAND (exp, 1);
5299 tree this_offset = DECL_FIELD_OFFSET (field);
5301 /* If this field hasn't been filled in yet, don't go
5302 past it. This should only happen when folding expressions
5303 made during type construction. */
5304 if (this_offset == 0)
5305 break;
5306 else if (! TREE_CONSTANT (this_offset)
5307 && contains_placeholder_p (this_offset))
5308 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5310 offset = size_binop (PLUS_EXPR, offset, this_offset);
5311 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5312 DECL_FIELD_BIT_OFFSET (field));
5314 if (! host_integerp (offset, 0))
5315 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5318 else if (TREE_CODE (exp) == ARRAY_REF)
5320 tree index = TREE_OPERAND (exp, 1);
5321 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5322 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5323 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5325 /* We assume all arrays have sizes that are a multiple of a byte.
5326 First subtract the lower bound, if any, in the type of the
5327 index, then convert to sizetype and multiply by the size of the
5328 array element. */
5329 if (low_bound != 0 && ! integer_zerop (low_bound))
5330 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5331 index, low_bound));
5333 /* If the index has a self-referential type, pass it to a
5334 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5335 component to one. */
5336 if (! TREE_CONSTANT (index)
5337 && contains_placeholder_p (index))
5338 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5339 if (! TREE_CONSTANT (unit_size)
5340 && contains_placeholder_p (unit_size))
5341 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5342 TREE_OPERAND (exp, 0));
5344 offset = size_binop (PLUS_EXPR, offset,
5345 size_binop (MULT_EXPR,
5346 convert (sizetype, index),
5347 unit_size));
5350 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5351 && ! ((TREE_CODE (exp) == NOP_EXPR
5352 || TREE_CODE (exp) == CONVERT_EXPR)
5353 && (TYPE_MODE (TREE_TYPE (exp))
5354 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5355 break;
5357 /* If any reference in the chain is volatile, the effect is volatile. */
5358 if (TREE_THIS_VOLATILE (exp))
5359 *pvolatilep = 1;
5361 /* If the offset is non-constant already, then we can't assume any
5362 alignment more than the alignment here. */
5363 if (! TREE_CONSTANT (offset))
5364 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5366 exp = TREE_OPERAND (exp, 0);
5369 if (DECL_P (exp))
5370 alignment = MIN (alignment, DECL_ALIGN (exp));
5371 else if (TREE_TYPE (exp) != 0)
5372 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5374 /* If OFFSET is constant, see if we can return the whole thing as a
5375 constant bit position. Otherwise, split it up. */
5376 if (host_integerp (offset, 0)
5377 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5378 bitsize_unit_node))
5379 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5380 && host_integerp (tem, 0))
5381 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5382 else
5383 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5385 *pmode = mode;
5386 *palignment = alignment;
5387 return exp;
5390 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5392 static enum memory_use_mode
5393 get_memory_usage_from_modifier (modifier)
5394 enum expand_modifier modifier;
5396 switch (modifier)
5398 case EXPAND_NORMAL:
5399 case EXPAND_SUM:
5400 return MEMORY_USE_RO;
5401 break;
5402 case EXPAND_MEMORY_USE_WO:
5403 return MEMORY_USE_WO;
5404 break;
5405 case EXPAND_MEMORY_USE_RW:
5406 return MEMORY_USE_RW;
5407 break;
5408 case EXPAND_MEMORY_USE_DONT:
5409 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5410 MEMORY_USE_DONT, because they are modifiers to a call of
5411 expand_expr in the ADDR_EXPR case of expand_expr. */
5412 case EXPAND_CONST_ADDRESS:
5413 case EXPAND_INITIALIZER:
5414 return MEMORY_USE_DONT;
5415 case EXPAND_MEMORY_USE_BAD:
5416 default:
5417 abort ();
5421 /* Given an rtx VALUE that may contain additions and multiplications, return
5422 an equivalent value that just refers to a register, memory, or constant.
5423 This is done by generating instructions to perform the arithmetic and
5424 returning a pseudo-register containing the value.
5426 The returned value may be a REG, SUBREG, MEM or constant. */
5429 force_operand (value, target)
5430 rtx value, target;
5432 register optab binoptab = 0;
5433 /* Use a temporary to force order of execution of calls to
5434 `force_operand'. */
5435 rtx tmp;
5436 register rtx op2;
5437 /* Use subtarget as the target for operand 0 of a binary operation. */
5438 register rtx subtarget = get_subtarget (target);
5440 /* Check for a PIC address load. */
5441 if (flag_pic
5442 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5443 && XEXP (value, 0) == pic_offset_table_rtx
5444 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5445 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5446 || GET_CODE (XEXP (value, 1)) == CONST))
5448 if (!subtarget)
5449 subtarget = gen_reg_rtx (GET_MODE (value));
5450 emit_move_insn (subtarget, value);
5451 return subtarget;
5454 if (GET_CODE (value) == PLUS)
5455 binoptab = add_optab;
5456 else if (GET_CODE (value) == MINUS)
5457 binoptab = sub_optab;
5458 else if (GET_CODE (value) == MULT)
5460 op2 = XEXP (value, 1);
5461 if (!CONSTANT_P (op2)
5462 && !(GET_CODE (op2) == REG && op2 != subtarget))
5463 subtarget = 0;
5464 tmp = force_operand (XEXP (value, 0), subtarget);
5465 return expand_mult (GET_MODE (value), tmp,
5466 force_operand (op2, NULL_RTX),
5467 target, 1);
5470 if (binoptab)
5472 op2 = XEXP (value, 1);
5473 if (!CONSTANT_P (op2)
5474 && !(GET_CODE (op2) == REG && op2 != subtarget))
5475 subtarget = 0;
5476 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5478 binoptab = add_optab;
5479 op2 = negate_rtx (GET_MODE (value), op2);
5482 /* Check for an addition with OP2 a constant integer and our first
5483 operand a PLUS of a virtual register and something else. In that
5484 case, we want to emit the sum of the virtual register and the
5485 constant first and then add the other value. This allows virtual
5486 register instantiation to simply modify the constant rather than
5487 creating another one around this addition. */
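/* Illustration (added; register numbers are hypothetical): given VALUE of
   the form

       (plus (plus (reg virtual-stack-vars) (reg 117)) (const_int 4))

   the code below first emits virtual-stack-vars + 4, so that virtual
   register instantiation can fold the constant into the frame-pointer
   offset, and only then adds in the remaining operand (reg 117).  */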
5488 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5489 && GET_CODE (XEXP (value, 0)) == PLUS
5490 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5491 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5492 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5494 rtx temp = expand_binop (GET_MODE (value), binoptab,
5495 XEXP (XEXP (value, 0), 0), op2,
5496 subtarget, 0, OPTAB_LIB_WIDEN);
5497 return expand_binop (GET_MODE (value), binoptab, temp,
5498 force_operand (XEXP (XEXP (value, 0), 1), 0),
5499 target, 0, OPTAB_LIB_WIDEN);
5502 tmp = force_operand (XEXP (value, 0), subtarget);
5503 return expand_binop (GET_MODE (value), binoptab, tmp,
5504 force_operand (op2, NULL_RTX),
5505 target, 0, OPTAB_LIB_WIDEN);
5506 /* We give UNSIGNEDP = 0 to expand_binop
5507 because the only operations we are expanding here are signed ones. */
5509 return value;
5512 /* Subroutine of expand_expr:
5513 save the non-copied parts (LIST) of an expr (LHS), and return a list
5514 which can restore these values to their previous values,
5515 should something modify their storage. */
5517 static tree
5518 save_noncopied_parts (lhs, list)
5519 tree lhs;
5520 tree list;
5522 tree tail;
5523 tree parts = 0;
5525 for (tail = list; tail; tail = TREE_CHAIN (tail))
5526 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5527 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5528 else
5530 tree part = TREE_VALUE (tail);
5531 tree part_type = TREE_TYPE (part);
5532 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5533 rtx target
5534 = assign_temp (build_qualified_type (part_type,
5535 (TYPE_QUALS (part_type)
5536 | TYPE_QUAL_CONST)),
5537 0, 1, 1);
5539 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5540 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5541 parts = tree_cons (to_be_saved,
5542 build (RTL_EXPR, part_type, NULL_TREE,
5543 (tree) target),
5544 parts);
5545 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5547 return parts;
5550 /* Subroutine of expand_expr:
5551 record the non-copied parts (LIST) of an expr (LHS), and return a list
5552 which specifies the initial values of these parts. */
5554 static tree
5555 init_noncopied_parts (lhs, list)
5556 tree lhs;
5557 tree list;
5559 tree tail;
5560 tree parts = 0;
5562 for (tail = list; tail; tail = TREE_CHAIN (tail))
5563 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5564 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5565 else if (TREE_PURPOSE (tail))
5567 tree part = TREE_VALUE (tail);
5568 tree part_type = TREE_TYPE (part);
5569 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5570 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5572 return parts;
5575 /* Subroutine of expand_expr: return nonzero iff there is no way that
5576 EXP can reference X, which is being modified. TOP_P is nonzero if this
5577 call is going to be used to determine whether we need a temporary
5578 for EXP, as opposed to a recursive call to this function.
5580 It is always safe for this routine to return zero since it merely
5581 searches for optimization opportunities. */
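/* A sketch of typical use (mirroring callers elsewhere in this file,
   not a new interface): a caller deciding whether a subexpression may
   be computed directly into TARGET writes something like

	if (target == 0 || ! safe_from_p (target, exp, 1))
	  target = gen_reg_rtx (mode);

   so a zero return only costs an extra temporary, never correctness.  */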
5584 safe_from_p (x, exp, top_p)
5585 rtx x;
5586 tree exp;
5587 int top_p;
5589 rtx exp_rtl = 0;
5590 int i, nops;
5591 static tree save_expr_list;
5593 if (x == 0
5594 /* If EXP has varying size, we MUST use a target since we currently
5595 have no way of allocating temporaries of variable size
5596 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5597 So we assume here that something at a higher level has prevented a
5598 clash. This is somewhat bogus, but the best we can do. Only
5599 do this when X is BLKmode and when we are at the top level. */
5600 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5601 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5602 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5603 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5604 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5605 != INTEGER_CST)
5606 && GET_MODE (x) == BLKmode)
5607 /* If X is in the outgoing argument area, it is always safe. */
5608 || (GET_CODE (x) == MEM
5609 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5610 || (GET_CODE (XEXP (x, 0)) == PLUS
5611 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5612 return 1;
5614 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5615 find the underlying pseudo. */
5616 if (GET_CODE (x) == SUBREG)
5618 x = SUBREG_REG (x);
5619 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5620 return 0;
5623 /* A SAVE_EXPR might appear many times in the expression passed to the
5624 top-level safe_from_p call, and if it has a complex subexpression,
5625 examining it multiple times could result in a combinatorial explosion.
5626 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5627 with optimization took about 28 minutes to compile -- even though it was
5628 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5629 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5630 we have processed. Note that the only test of top_p was above. */
5632 if (top_p)
5634 int rtn;
5635 tree t;
5637 save_expr_list = 0;
5639 rtn = safe_from_p (x, exp, 0);
5641 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5642 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5644 return rtn;
5647 /* Now look at our tree code and possibly recurse. */
5648 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5650 case 'd':
5651 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5652 break;
5654 case 'c':
5655 return 1;
5657 case 'x':
5658 if (TREE_CODE (exp) == TREE_LIST)
5659 return ((TREE_VALUE (exp) == 0
5660 || safe_from_p (x, TREE_VALUE (exp), 0))
5661 && (TREE_CHAIN (exp) == 0
5662 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5663 else if (TREE_CODE (exp) == ERROR_MARK)
5664 return 1; /* An already-visited SAVE_EXPR? */
5665 else
5666 return 0;
5668 case '1':
5669 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5671 case '2':
5672 case '<':
5673 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5674 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5676 case 'e':
5677 case 'r':
5678 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5679 the expression. If it is set, we conflict iff we are that rtx or
5680 both are in memory. Otherwise, we check all operands of the
5681 expression recursively. */
5683 switch (TREE_CODE (exp))
5685 case ADDR_EXPR:
5686 return (staticp (TREE_OPERAND (exp, 0))
5687 || TREE_STATIC (exp)
5688 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5690 case INDIRECT_REF:
5691 if (GET_CODE (x) == MEM
5692 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5693 get_alias_set (exp)))
5694 return 0;
5695 break;
5697 case CALL_EXPR:
5698 /* Assume that the call will clobber all hard registers and
5699 all of memory. */
5700 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5701 || GET_CODE (x) == MEM)
5702 return 0;
5703 break;
5705 case RTL_EXPR:
5706 /* If a sequence exists, we would have to scan every instruction
5707 in the sequence to see if it was safe. This is probably not
5708 worthwhile. */
5709 if (RTL_EXPR_SEQUENCE (exp))
5710 return 0;
5712 exp_rtl = RTL_EXPR_RTL (exp);
5713 break;
5715 case WITH_CLEANUP_EXPR:
5716 exp_rtl = RTL_EXPR_RTL (exp);
5717 break;
5719 case CLEANUP_POINT_EXPR:
5720 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5722 case SAVE_EXPR:
5723 exp_rtl = SAVE_EXPR_RTL (exp);
5724 if (exp_rtl)
5725 break;
5727 /* If we've already scanned this, don't do it again. Otherwise,
5728 show we've scanned it and record for clearing the flag if we're
5729 going on. */
5730 if (TREE_PRIVATE (exp))
5731 return 1;
5733 TREE_PRIVATE (exp) = 1;
5734 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5736 TREE_PRIVATE (exp) = 0;
5737 return 0;
5740 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5741 return 1;
5743 case BIND_EXPR:
5744 /* The only operand we look at is operand 1. The rest aren't
5745 part of the expression. */
5746 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5748 case METHOD_CALL_EXPR:
5749 /* This takes an rtx argument, but shouldn't appear here. */
5750 abort ();
5752 default:
5753 break;
5756 /* If we have an rtx, we do not need to scan our operands. */
5757 if (exp_rtl)
5758 break;
5760 nops = first_rtl_op (TREE_CODE (exp));
5761 for (i = 0; i < nops; i++)
5762 if (TREE_OPERAND (exp, i) != 0
5763 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5764 return 0;
5766 /* If this is a language-specific tree code, it may require
5767 special handling. */
5768 if ((unsigned int) TREE_CODE (exp)
5769 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5770 && lang_safe_from_p
5771 && !(*lang_safe_from_p) (x, exp))
5772 return 0;
5775 /* If we have an rtl, find any enclosed object. Then see if we conflict
5776 with it. */
5777 if (exp_rtl)
5779 if (GET_CODE (exp_rtl) == SUBREG)
5781 exp_rtl = SUBREG_REG (exp_rtl);
5782 if (GET_CODE (exp_rtl) == REG
5783 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5784 return 0;
5787 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5788 are memory and they conflict. */
5789 return ! (rtx_equal_p (x, exp_rtl)
5790 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5791 && true_dependence (exp_rtl, GET_MODE (x), x,
5792 rtx_addr_varies_p)));
5795 /* If we reach here, it is safe. */
5796 return 1;
5799 /* Subroutine of expand_expr: return nonzero iff EXP is an
5800 expression whose type is statically determinable. */
5802 static int
5803 fixed_type_p (exp)
5804 tree exp;
5806 if (TREE_CODE (exp) == PARM_DECL
5807 || TREE_CODE (exp) == VAR_DECL
5808 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5809 || TREE_CODE (exp) == COMPONENT_REF
5810 || TREE_CODE (exp) == ARRAY_REF)
5811 return 1;
5812 return 0;
5815 /* Subroutine of expand_expr: return rtx if EXP is a
5816 variable or parameter; else return 0. */
5818 static rtx
5819 var_rtx (exp)
5820 tree exp;
5822 STRIP_NOPS (exp);
5823 switch (TREE_CODE (exp))
5825 case PARM_DECL:
5826 case VAR_DECL:
5827 return DECL_RTL (exp);
5828 default:
5829 return 0;
5833 #ifdef MAX_INTEGER_COMPUTATION_MODE
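/* Issue an internal error if EXP involves an integer computation in a
   mode wider than MAX_INTEGER_COMPUTATION_MODE.  For example (purely
   illustrative), on a target defining MAX_INTEGER_COMPUTATION_MODE as
   SImode, a DImode addition reaching here would be rejected with
   "unsupported wide integer operation".  */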
5835 void
5836 check_max_integer_computation_mode (exp)
5837 tree exp;
5839 enum tree_code code;
5840 enum machine_mode mode;
5842 /* Strip any NOPs that don't change the mode. */
5843 STRIP_NOPS (exp);
5844 code = TREE_CODE (exp);
5846 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5847 if (code == NOP_EXPR
5848 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5849 return;
5851 /* First check the type of the overall operation. We need only look at
5852 unary, binary and relational operations. */
5853 if (TREE_CODE_CLASS (code) == '1'
5854 || TREE_CODE_CLASS (code) == '2'
5855 || TREE_CODE_CLASS (code) == '<')
5857 mode = TYPE_MODE (TREE_TYPE (exp));
5858 if (GET_MODE_CLASS (mode) == MODE_INT
5859 && mode > MAX_INTEGER_COMPUTATION_MODE)
5860 internal_error ("unsupported wide integer operation");
5863 /* Check operand of a unary op. */
5864 if (TREE_CODE_CLASS (code) == '1')
5866 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5867 if (GET_MODE_CLASS (mode) == MODE_INT
5868 && mode > MAX_INTEGER_COMPUTATION_MODE)
5869 internal_error ("unsupported wide integer operation");
5872 /* Check operands of a binary/comparison op. */
5873 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5875 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5876 if (GET_MODE_CLASS (mode) == MODE_INT
5877 && mode > MAX_INTEGER_COMPUTATION_MODE)
5878 internal_error ("unsupported wide integer operation");
5880 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5881 if (GET_MODE_CLASS (mode) == MODE_INT
5882 && mode > MAX_INTEGER_COMPUTATION_MODE)
5883 internal_error ("unsupported wide integer operation");
5886 #endif
5888 /* expand_expr: generate code for computing expression EXP.
5889 An rtx for the computed value is returned. The value is never null.
5890 In the case of a void EXP, const0_rtx is returned.
5892 The value may be stored in TARGET if TARGET is nonzero.
5893 TARGET is just a suggestion; callers must assume that
5894 the rtx returned may not be the same as TARGET.
5896 If TARGET is CONST0_RTX, it means that the value will be ignored.
5898 If TMODE is not VOIDmode, it suggests generating the
5899 result in mode TMODE. But this is done only when convenient.
5900 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5901 TMODE is just a suggestion; callers must assume that
5902 the rtx returned may not have mode TMODE.
5904 Note that TARGET may have neither TMODE nor MODE. In that case, it
5905 probably will not be used.
5907 If MODIFIER is EXPAND_SUM then when EXP is an addition
5908 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5909 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5910 products as above, or REG or MEM, or constant.
5911 Ordinarily in such cases we would output mul or add instructions
5912 and then return a pseudo reg containing the sum.
5914 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5915 it also marks a label as absolutely required (it can't be dead).
5916 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5917 This is used for outputting expressions used in initializers.
5919 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5920 with a constant address even if that address is not normally legitimate.
5921 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
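/* As a hedged illustration of the interface above (not a requirement
   imposed by this function): a caller that simply wants EXP's value
   somewhere convenient writes

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and must cope with VAL being a REG, a MEM or a constant, since both
   TARGET and TMODE are only suggestions.  */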
5924 expand_expr (exp, target, tmode, modifier)
5925 register tree exp;
5926 rtx target;
5927 enum machine_mode tmode;
5928 enum expand_modifier modifier;
5930 register rtx op0, op1, temp;
5931 tree type = TREE_TYPE (exp);
5932 int unsignedp = TREE_UNSIGNED (type);
5933 register enum machine_mode mode;
5934 register enum tree_code code = TREE_CODE (exp);
5935 optab this_optab;
5936 rtx subtarget, original_target;
5937 int ignore;
5938 tree context;
5939 /* Used by check-memory-usage to make modifier read only. */
5940 enum expand_modifier ro_modifier;
5942 /* Handle ERROR_MARK before anybody tries to access its type. */
5943 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5945 op0 = CONST0_RTX (tmode);
5946 if (op0 != 0)
5947 return op0;
5948 return const0_rtx;
5951 mode = TYPE_MODE (type);
5952 /* Use subtarget as the target for operand 0 of a binary operation. */
5953 subtarget = get_subtarget (target);
5954 original_target = target;
5955 ignore = (target == const0_rtx
5956 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5957 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5958 || code == COND_EXPR)
5959 && TREE_CODE (type) == VOID_TYPE));
5961 /* Make a read-only version of the modifier. */
5962 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5963 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5964 ro_modifier = modifier;
5965 else
5966 ro_modifier = EXPAND_NORMAL;
5968 /* If we are going to ignore this result, we need only do something
5969 if there is a side-effect somewhere in the expression. If there
5970 is, short-circuit the most common cases here. Note that we must
5971 not call expand_expr with anything but const0_rtx in case this
5972 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5974 if (ignore)
5976 if (! TREE_SIDE_EFFECTS (exp))
5977 return const0_rtx;
5979 /* Ensure we reference a volatile object even if value is ignored, but
5980 don't do this if all we are doing is taking its address. */
5981 if (TREE_THIS_VOLATILE (exp)
5982 && TREE_CODE (exp) != FUNCTION_DECL
5983 && mode != VOIDmode && mode != BLKmode
5984 && modifier != EXPAND_CONST_ADDRESS)
5986 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5987 if (GET_CODE (temp) == MEM)
5988 temp = copy_to_reg (temp);
5989 return const0_rtx;
5992 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5993 || code == INDIRECT_REF || code == BUFFER_REF)
5994 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5995 VOIDmode, ro_modifier);
5996 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5997 || code == ARRAY_REF)
5999 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6000 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6001 return const0_rtx;
6003 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6004 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6005 /* If the second operand has no side effects, just evaluate
6006 the first. */
6007 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6008 VOIDmode, ro_modifier);
6009 else if (code == BIT_FIELD_REF)
6011 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6012 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6013 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6014 return const0_rtx;
6017 target = 0;
6020 #ifdef MAX_INTEGER_COMPUTATION_MODE
6021 /* Only check stuff here if the mode we want is different from the mode
6022 of the expression; if it's the same, check_max_integer_computation_mode
6023 will handle it. Do we really need to check this stuff at all? */
6025 if (target
6026 && GET_MODE (target) != mode
6027 && TREE_CODE (exp) != INTEGER_CST
6028 && TREE_CODE (exp) != PARM_DECL
6029 && TREE_CODE (exp) != ARRAY_REF
6030 && TREE_CODE (exp) != COMPONENT_REF
6031 && TREE_CODE (exp) != BIT_FIELD_REF
6032 && TREE_CODE (exp) != INDIRECT_REF
6033 && TREE_CODE (exp) != CALL_EXPR
6034 && TREE_CODE (exp) != VAR_DECL
6035 && TREE_CODE (exp) != RTL_EXPR)
6037 enum machine_mode mode = GET_MODE (target);
6039 if (GET_MODE_CLASS (mode) == MODE_INT
6040 && mode > MAX_INTEGER_COMPUTATION_MODE)
6041 internal_error ("unsupported wide integer operation");
6044 if (tmode != mode
6045 && TREE_CODE (exp) != INTEGER_CST
6046 && TREE_CODE (exp) != PARM_DECL
6047 && TREE_CODE (exp) != ARRAY_REF
6048 && TREE_CODE (exp) != COMPONENT_REF
6049 && TREE_CODE (exp) != BIT_FIELD_REF
6050 && TREE_CODE (exp) != INDIRECT_REF
6051 && TREE_CODE (exp) != VAR_DECL
6052 && TREE_CODE (exp) != CALL_EXPR
6053 && TREE_CODE (exp) != RTL_EXPR
6054 && GET_MODE_CLASS (tmode) == MODE_INT
6055 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6056 internal_error ("unsupported wide integer operation");
6058 check_max_integer_computation_mode (exp);
6059 #endif
6061 /* If we will do cse, generate all results into pseudo registers
6062 since 1) that allows cse to find more things
6063 and 2) otherwise cse could produce an insn the machine
6064 cannot support. */
6066 if (! cse_not_expected && mode != BLKmode && target
6067 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6068 target = subtarget;
6070 switch (code)
6072 case LABEL_DECL:
6074 tree function = decl_function_context (exp);
6075 /* Handle using a label in a containing function. */
6076 if (function != current_function_decl
6077 && function != inline_function_decl && function != 0)
6079 struct function *p = find_function_data (function);
6080 p->expr->x_forced_labels
6081 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6082 p->expr->x_forced_labels);
6084 else
6086 if (modifier == EXPAND_INITIALIZER)
6087 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6088 label_rtx (exp),
6089 forced_labels);
6092 temp = gen_rtx_MEM (FUNCTION_MODE,
6093 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6094 if (function != current_function_decl
6095 && function != inline_function_decl && function != 0)
6096 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6097 return temp;
6100 case PARM_DECL:
6101 if (DECL_RTL (exp) == 0)
6103 error_with_decl (exp, "prior parameter's size depends on `%s'");
6104 return CONST0_RTX (mode);
6107 /* ... fall through ... */
6109 case VAR_DECL:
6110 /* If a static var's type was incomplete when the decl was written,
6111 but the type is complete now, lay out the decl now. */
6112 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6113 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6115 layout_decl (exp, 0);
6116 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6119 /* Although static-storage variables start off initialized, according to
6120 ANSI C, a memcpy could overwrite them with uninitialized values. So
6121 we check them too. This also lets us check for read-only variables
6122 accessed via a non-const declaration, in case it won't be detected
6123 any other way (e.g., in an embedded system or OS kernel without
6124 memory protection).
6126 Aggregates are not checked here; they're handled elsewhere. */
6127 if (cfun && current_function_check_memory_usage
6128 && code == VAR_DECL
6129 && GET_CODE (DECL_RTL (exp)) == MEM
6130 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6132 enum memory_use_mode memory_usage;
6133 memory_usage = get_memory_usage_from_modifier (modifier);
6135 in_check_memory_usage = 1;
6136 if (memory_usage != MEMORY_USE_DONT)
6137 emit_library_call (chkr_check_addr_libfunc,
6138 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6139 XEXP (DECL_RTL (exp), 0), Pmode,
6140 GEN_INT (int_size_in_bytes (type)),
6141 TYPE_MODE (sizetype),
6142 GEN_INT (memory_usage),
6143 TYPE_MODE (integer_type_node));
6144 in_check_memory_usage = 0;
6147 /* ... fall through ... */
6149 case FUNCTION_DECL:
6150 case RESULT_DECL:
6151 if (DECL_RTL (exp) == 0)
6152 abort ();
6154 /* Ensure variable marked as used even if it doesn't go through
6155 a parser. If it hasn't been used yet, write out an external
6156 definition. */
6157 if (! TREE_USED (exp))
6159 assemble_external (exp);
6160 TREE_USED (exp) = 1;
6163 /* Show we haven't gotten RTL for this yet. */
6164 temp = 0;
6166 /* Handle variables inherited from containing functions. */
6167 context = decl_function_context (exp);
6169 /* We treat inline_function_decl as an alias for the current function
6170 because that is the inline function whose vars, types, etc.
6171 are being merged into the current function.
6172 See expand_inline_function. */
6174 if (context != 0 && context != current_function_decl
6175 && context != inline_function_decl
6176 /* If var is static, we don't need a static chain to access it. */
6177 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6178 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6180 rtx addr;
6182 /* Mark as non-local and addressable. */
6183 DECL_NONLOCAL (exp) = 1;
6184 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6185 abort ();
6186 mark_addressable (exp);
6187 if (GET_CODE (DECL_RTL (exp)) != MEM)
6188 abort ();
6189 addr = XEXP (DECL_RTL (exp), 0);
6190 if (GET_CODE (addr) == MEM)
6191 addr = change_address (addr, Pmode,
6192 fix_lexical_addr (XEXP (addr, 0), exp));
6193 else
6194 addr = fix_lexical_addr (addr, exp);
6196 temp = change_address (DECL_RTL (exp), mode, addr);
6199 /* This is the case of an array whose size is to be determined
6200 from its initializer, while the initializer is still being parsed.
6201 See expand_decl. */
6203 else if (GET_CODE (DECL_RTL (exp)) == MEM
6204 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6205 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6206 XEXP (DECL_RTL (exp), 0));
6208 /* If DECL_RTL is memory, we are in the normal case and either
6209 the address is not valid or it is not a register and -fforce-addr
6210 is specified, get the address into a register. */
6212 else if (GET_CODE (DECL_RTL (exp)) == MEM
6213 && modifier != EXPAND_CONST_ADDRESS
6214 && modifier != EXPAND_SUM
6215 && modifier != EXPAND_INITIALIZER
6216 && (! memory_address_p (DECL_MODE (exp),
6217 XEXP (DECL_RTL (exp), 0))
6218 || (flag_force_addr
6219 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6220 temp = change_address (DECL_RTL (exp), VOIDmode,
6221 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6223 /* If we got something, return it. But first, set the alignment
6224 if the address is a register. */
6225 if (temp != 0)
6227 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6228 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6230 return temp;
6233 /* If the mode of DECL_RTL does not match that of the decl, it
6234 must be a promoted value. We return a SUBREG of the wanted mode,
6235 but mark it so that we know that it was already extended. */
6237 if (GET_CODE (DECL_RTL (exp)) == REG
6238 && GET_MODE (DECL_RTL (exp)) != mode)
6240 /* Get the signedness used for this variable. Ensure we get the
6241 same mode we got when the variable was declared. */
6242 if (GET_MODE (DECL_RTL (exp))
6243 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6244 abort ();
6246 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6247 SUBREG_PROMOTED_VAR_P (temp) = 1;
6248 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6249 return temp;
6252 return DECL_RTL (exp);
6254 case INTEGER_CST:
6255 return immed_double_const (TREE_INT_CST_LOW (exp),
6256 TREE_INT_CST_HIGH (exp), mode);
6258 case CONST_DECL:
6259 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6260 EXPAND_MEMORY_USE_BAD);
6262 case REAL_CST:
6263 /* If optimized, generate immediate CONST_DOUBLE
6264 which will be turned into memory by reload if necessary.
6266 We used to force a register so that loop.c could see it. But
6267 this does not allow gen_* patterns to perform optimizations with
6268 the constants. It also produces two insns in cases like "x = 1.0;".
6269 On most machines, floating-point constants are not permitted in
6270 many insns, so we'd end up copying it to a register in any case.
6272 Now, we do the copying in expand_binop, if appropriate. */
6273 return immed_real_const (exp);
6275 case COMPLEX_CST:
6276 case STRING_CST:
6277 if (! TREE_CST_RTL (exp))
6278 output_constant_def (exp, 1);
6280 /* TREE_CST_RTL probably contains a constant address.
6281 On RISC machines where a constant address isn't valid,
6282 make some insns to get that address into a register. */
6283 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6284 && modifier != EXPAND_CONST_ADDRESS
6285 && modifier != EXPAND_INITIALIZER
6286 && modifier != EXPAND_SUM
6287 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6288 || (flag_force_addr
6289 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6290 return change_address (TREE_CST_RTL (exp), VOIDmode,
6291 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6292 return TREE_CST_RTL (exp);
6294 case EXPR_WITH_FILE_LOCATION:
6296 rtx to_return;
6297 const char *saved_input_filename = input_filename;
6298 int saved_lineno = lineno;
6299 input_filename = EXPR_WFL_FILENAME (exp);
6300 lineno = EXPR_WFL_LINENO (exp);
6301 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6302 emit_line_note (input_filename, lineno);
6303 /* Possibly avoid switching back and forth here. */
6304 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6305 input_filename = saved_input_filename;
6306 lineno = saved_lineno;
6307 return to_return;
6310 case SAVE_EXPR:
6311 context = decl_function_context (exp);
6313 /* If this SAVE_EXPR was at global context, assume we are an
6314 initialization function and move it into our context. */
6315 if (context == 0)
6316 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6318 /* We treat inline_function_decl as an alias for the current function
6319 because that is the inline function whose vars, types, etc.
6320 are being merged into the current function.
6321 See expand_inline_function. */
6322 if (context == current_function_decl || context == inline_function_decl)
6323 context = 0;
6325 /* If this is non-local, handle it. */
6326 if (context)
6328 /* The following call just exists to abort if the context is
6329 not of a containing function. */
6330 find_function_data (context);
6332 temp = SAVE_EXPR_RTL (exp);
6333 if (temp && GET_CODE (temp) == REG)
6335 put_var_into_stack (exp);
6336 temp = SAVE_EXPR_RTL (exp);
6338 if (temp == 0 || GET_CODE (temp) != MEM)
6339 abort ();
6340 return change_address (temp, mode,
6341 fix_lexical_addr (XEXP (temp, 0), exp));
6343 if (SAVE_EXPR_RTL (exp) == 0)
6345 if (mode == VOIDmode)
6346 temp = const0_rtx;
6347 else
6348 temp = assign_temp (build_qualified_type (type,
6349 (TYPE_QUALS (type)
6350 | TYPE_QUAL_CONST)),
6351 3, 0, 0);
6353 SAVE_EXPR_RTL (exp) = temp;
6354 if (!optimize && GET_CODE (temp) == REG)
6355 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6356 save_expr_regs);
6358 /* If the mode of TEMP does not match that of the expression, it
6359 must be a promoted value. We pass store_expr a SUBREG of the
6360 wanted mode but mark it so that we know that it was already
6361 extended. Note that `unsignedp' was modified above in
6362 this case. */
6364 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6366 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6367 SUBREG_PROMOTED_VAR_P (temp) = 1;
6368 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6371 if (temp == const0_rtx)
6372 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6373 EXPAND_MEMORY_USE_BAD);
6374 else
6375 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6377 TREE_USED (exp) = 1;
6380 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6381 must be a promoted value. We return a SUBREG of the wanted mode,
6382 but mark it so that we know that it was already extended. */
6384 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6385 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6387 /* Compute the signedness and make the proper SUBREG. */
6388 promote_mode (type, mode, &unsignedp, 0);
6389 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6390 SUBREG_PROMOTED_VAR_P (temp) = 1;
6391 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6392 return temp;
6395 return SAVE_EXPR_RTL (exp);
6397 case UNSAVE_EXPR:
6399 rtx temp;
6400 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6401 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6402 return temp;
6405 case PLACEHOLDER_EXPR:
6407 tree placeholder_expr;
6409 /* If there is an object on the head of the placeholder list,
6410 see if some object in it is of type TYPE or a pointer to it. For
6411 further information, see tree.def. */
6412 for (placeholder_expr = placeholder_list;
6413 placeholder_expr != 0;
6414 placeholder_expr = TREE_CHAIN (placeholder_expr))
6416 tree need_type = TYPE_MAIN_VARIANT (type);
6417 tree object = 0;
6418 tree old_list = placeholder_list;
6419 tree elt;
6421 /* Find the outermost reference that is of the type we want.
6422 If none, see if any object has a type that is a pointer to
6423 the type we want. */
6424 for (elt = TREE_PURPOSE (placeholder_expr);
6425 elt != 0 && object == 0;
6427 = ((TREE_CODE (elt) == COMPOUND_EXPR
6428 || TREE_CODE (elt) == COND_EXPR)
6429 ? TREE_OPERAND (elt, 1)
6430 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6431 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6432 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6433 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6434 ? TREE_OPERAND (elt, 0) : 0))
6435 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6436 object = elt;
6438 for (elt = TREE_PURPOSE (placeholder_expr);
6439 elt != 0 && object == 0;
6441 = ((TREE_CODE (elt) == COMPOUND_EXPR
6442 || TREE_CODE (elt) == COND_EXPR)
6443 ? TREE_OPERAND (elt, 1)
6444 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6445 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6446 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6447 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6448 ? TREE_OPERAND (elt, 0) : 0))
6449 if (POINTER_TYPE_P (TREE_TYPE (elt))
6450 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6451 == need_type))
6452 object = build1 (INDIRECT_REF, need_type, elt);
6454 if (object != 0)
6456 /* Expand this object skipping the list entries before
6457 it was found in case it is also a PLACEHOLDER_EXPR.
6458 In that case, we want to translate it using subsequent
6459 entries. */
6460 placeholder_list = TREE_CHAIN (placeholder_expr);
6461 temp = expand_expr (object, original_target, tmode,
6462 ro_modifier);
6463 placeholder_list = old_list;
6464 return temp;
6469 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6470 abort ();
6472 case WITH_RECORD_EXPR:
6473 /* Put the object on the placeholder list, expand our first operand,
6474 and pop the list. */
6475 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6476 placeholder_list);
6477 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6478 tmode, ro_modifier);
6479 placeholder_list = TREE_CHAIN (placeholder_list);
6480 return target;
6482 case GOTO_EXPR:
6483 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6484 expand_goto (TREE_OPERAND (exp, 0));
6485 else
6486 expand_computed_goto (TREE_OPERAND (exp, 0));
6487 return const0_rtx;
6489 case EXIT_EXPR:
6490 expand_exit_loop_if_false (NULL_PTR,
6491 invert_truthvalue (TREE_OPERAND (exp, 0)));
6492 return const0_rtx;
6494 case LABELED_BLOCK_EXPR:
6495 if (LABELED_BLOCK_BODY (exp))
6496 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6497 /* Should perhaps use expand_label, but this is simpler and safer. */
6498 do_pending_stack_adjust ();
6499 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6500 return const0_rtx;
6502 case EXIT_BLOCK_EXPR:
6503 if (EXIT_BLOCK_RETURN (exp))
6504 sorry ("returned value in block_exit_expr");
6505 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6506 return const0_rtx;
6508 case LOOP_EXPR:
6509 push_temp_slots ();
6510 expand_start_loop (1);
6511 expand_expr_stmt (TREE_OPERAND (exp, 0));
6512 expand_end_loop ();
6513 pop_temp_slots ();
6515 return const0_rtx;
6517 case BIND_EXPR:
6519 tree vars = TREE_OPERAND (exp, 0);
6520 int vars_need_expansion = 0;
6522 /* Need to open a binding contour here because
6523 if there are any cleanups they must be contained here. */
6524 expand_start_bindings (2);
6526 /* Mark the corresponding BLOCK for output in its proper place. */
6527 if (TREE_OPERAND (exp, 2) != 0
6528 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6529 insert_block (TREE_OPERAND (exp, 2));
6531 /* If VARS have not yet been expanded, expand them now. */
6532 while (vars)
6534 if (!DECL_RTL_SET_P (vars))
6536 vars_need_expansion = 1;
6537 expand_decl (vars);
6539 expand_decl_init (vars);
6540 vars = TREE_CHAIN (vars);
6543 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6545 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6547 return temp;
6550 case RTL_EXPR:
6551 if (RTL_EXPR_SEQUENCE (exp))
6553 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6554 abort ();
6555 emit_insns (RTL_EXPR_SEQUENCE (exp));
6556 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6558 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6559 free_temps_for_rtl_expr (exp);
6560 return RTL_EXPR_RTL (exp);
6562 case CONSTRUCTOR:
6563 /* If we don't need the result, just ensure we evaluate any
6564 subexpressions. */
6565 if (ignore)
6567 tree elt;
6568 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6569 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6570 EXPAND_MEMORY_USE_BAD);
6571 return const0_rtx;
6574 /* All elts simple constants => refer to a constant in memory. But
6575 if this is a non-BLKmode mode, let it store a field at a time
6576 since that should make a CONST_INT or CONST_DOUBLE when we
6577 fold. Likewise, if we have a target we can use, it is best to
6578 store directly into the target unless the type is large enough
6579 that memcpy will be used. If we are making an initializer and
6580 all operands are constant, put it in memory as well. */
6581 else if ((TREE_STATIC (exp)
6582 && ((mode == BLKmode
6583 && ! (target != 0 && safe_from_p (target, exp, 1)))
6584 || TREE_ADDRESSABLE (exp)
6585 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6586 && (! MOVE_BY_PIECES_P
6587 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6588 TYPE_ALIGN (type)))
6589 && ! mostly_zeros_p (exp))))
6590 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6592 rtx constructor = output_constant_def (exp, 1);
6594 if (modifier != EXPAND_CONST_ADDRESS
6595 && modifier != EXPAND_INITIALIZER
6596 && modifier != EXPAND_SUM
6597 && (! memory_address_p (GET_MODE (constructor),
6598 XEXP (constructor, 0))
6599 || (flag_force_addr
6600 && GET_CODE (XEXP (constructor, 0)) != REG)))
6601 constructor = change_address (constructor, VOIDmode,
6602 XEXP (constructor, 0));
6603 return constructor;
6605 else
6607 /* Handle calls that pass values in multiple non-contiguous
6608 locations. The Irix 6 ABI has examples of this. */
6609 if (target == 0 || ! safe_from_p (target, exp, 1)
6610 || GET_CODE (target) == PARALLEL)
6611 target
6612 = assign_temp (build_qualified_type (type,
6613 (TYPE_QUALS (type)
6614 | (TREE_READONLY (exp)
6615 * TYPE_QUAL_CONST))),
6616 TREE_ADDRESSABLE (exp), 1, 1);
6618 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6619 int_size_in_bytes (TREE_TYPE (exp)));
6620 return target;
6623 case INDIRECT_REF:
6625 tree exp1 = TREE_OPERAND (exp, 0);
6626 tree index;
6627 tree string = string_constant (exp1, &index);
6629 /* Try to optimize reads from const strings. */
6630 if (string
6631 && TREE_CODE (string) == STRING_CST
6632 && TREE_CODE (index) == INTEGER_CST
6633 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6634 && GET_MODE_CLASS (mode) == MODE_INT
6635 && GET_MODE_SIZE (mode) == 1
6636 && modifier != EXPAND_MEMORY_USE_WO)
6637 return
6638 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6640 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6641 op0 = memory_address (mode, op0);
6643 if (cfun && current_function_check_memory_usage
6644 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6646 enum memory_use_mode memory_usage;
6647 memory_usage = get_memory_usage_from_modifier (modifier);
6649 if (memory_usage != MEMORY_USE_DONT)
6651 in_check_memory_usage = 1;
6652 emit_library_call (chkr_check_addr_libfunc,
6653 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6654 Pmode, GEN_INT (int_size_in_bytes (type)),
6655 TYPE_MODE (sizetype),
6656 GEN_INT (memory_usage),
6657 TYPE_MODE (integer_type_node));
6658 in_check_memory_usage = 0;
6662 temp = gen_rtx_MEM (mode, op0);
6663 set_mem_attributes (temp, exp, 0);
6665 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6666 here, because, in C and C++, the fact that a location is accessed
6667 through a pointer to const does not mean that the value there can
6668 never change. Languages where it can never change should
6669 also set TREE_STATIC. */
6670 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6672 /* If we are writing to this object and its type is a record with
6673 readonly fields, we must mark it as readonly so it will
6674 conflict with readonly references to those fields. */
6675 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6676 RTX_UNCHANGING_P (temp) = 1;
6678 return temp;
6681 case ARRAY_REF:
6682 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6683 abort ();
6686 tree array = TREE_OPERAND (exp, 0);
6687 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6688 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6689 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6690 HOST_WIDE_INT i;
6692 /* Optimize the special-case of a zero lower bound.
6694 We convert the low_bound to sizetype to avoid some problems
6695 with constant folding. (E.g. suppose the lower bound is 1,
6696 and its mode is QI. Without the conversion, (ARRAY
6697 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6698 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6700 if (! integer_zerop (low_bound))
6701 index = size_diffop (index, convert (sizetype, low_bound));
6703 /* Fold an expression like: "foo"[2].
6704 This is not done in fold so it won't happen inside &.
6705 Don't fold if this is for wide characters since it's too
6706 difficult to do correctly and this is a very rare case. */
6708 if (TREE_CODE (array) == STRING_CST
6709 && TREE_CODE (index) == INTEGER_CST
6710 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6711 && GET_MODE_CLASS (mode) == MODE_INT
6712 && GET_MODE_SIZE (mode) == 1)
6713 return
6714 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6716 /* If this is a constant index into a constant array,
6717 just get the value from the array. Handle both the cases when
6718 we have an explicit constructor and when our operand is a variable
6719 that was declared const. */
6721 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6722 && TREE_CODE (index) == INTEGER_CST
6723 && 0 > compare_tree_int (index,
6724 list_length (CONSTRUCTOR_ELTS
6725 (TREE_OPERAND (exp, 0)))))
6727 tree elem;
6729 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6730 i = TREE_INT_CST_LOW (index);
6731 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6734 if (elem)
6735 return expand_expr (fold (TREE_VALUE (elem)), target,
6736 tmode, ro_modifier);
6739 else if (optimize >= 1
6740 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6741 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6742 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6744 if (TREE_CODE (index) == INTEGER_CST)
6746 tree init = DECL_INITIAL (array);
6748 if (TREE_CODE (init) == CONSTRUCTOR)
6750 tree elem;
6752 for (elem = CONSTRUCTOR_ELTS (init);
6753 (elem
6754 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6755 elem = TREE_CHAIN (elem))
6758 if (elem && !TREE_SIDE_EFFECTS (elem))
6759 return expand_expr (fold (TREE_VALUE (elem)), target,
6760 tmode, ro_modifier);
6762 else if (TREE_CODE (init) == STRING_CST
6763 && 0 > compare_tree_int (index,
6764 TREE_STRING_LENGTH (init)))
6766 tree type = TREE_TYPE (TREE_TYPE (init));
6767 enum machine_mode mode = TYPE_MODE (type);
6769 if (GET_MODE_CLASS (mode) == MODE_INT
6770 && GET_MODE_SIZE (mode) == 1)
6771 return (GEN_INT
6772 (TREE_STRING_POINTER
6773 (init)[TREE_INT_CST_LOW (index)]));
6778 /* Fall through. */
6780 case COMPONENT_REF:
6781 case BIT_FIELD_REF:
6782 /* If the operand is a CONSTRUCTOR, we can just extract the
6783 appropriate field if it is present. Don't do this if we have
6784 already written the data since we want to refer to that copy
6785 and varasm.c assumes that's what we'll do. */
6786 if (code != ARRAY_REF
6787 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6788 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6790 tree elt;
6792 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6793 elt = TREE_CHAIN (elt))
6794 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6795 /* We can normally use the value of the field in the
6796 CONSTRUCTOR. However, if this is a bitfield in
6797 an integral mode that we can fit in a HOST_WIDE_INT,
6798 we must mask only the number of bits in the bitfield,
6799 since this is done implicitly by the constructor. If
6800 the bitfield does not meet either of those conditions,
6801 we can't do this optimization. */
6802 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6803 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6804 == MODE_INT)
6805 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6806 <= HOST_BITS_PER_WIDE_INT))))
6808 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6809 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6811 HOST_WIDE_INT bitsize
6812 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6814 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6816 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6817 op0 = expand_and (op0, op1, target);
6819 else
6821 enum machine_mode imode
6822 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6823 tree count
6824 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6827 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6828 target, 0);
6829 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6830 target, 0);
6834 return op0;
6839 enum machine_mode mode1;
6840 HOST_WIDE_INT bitsize, bitpos;
6841 tree offset;
6842 int volatilep = 0;
6843 unsigned int alignment;
6844 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6845 &mode1, &unsignedp, &volatilep,
6846 &alignment);
6848 /* If we got back the original object, something is wrong. Perhaps
6849 we are evaluating an expression too early. In any event, don't
6850 infinitely recurse. */
6851 if (tem == exp)
6852 abort ();
6854 /* If TEM's type is a union of variable size, pass TARGET to the inner
6855 computation, since it will need a temporary and TARGET is known
6856 to have to do. This occurs in unchecked conversion in Ada. */
6858 op0 = expand_expr (tem,
6859 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6860 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6861 != INTEGER_CST)
6862 ? target : NULL_RTX),
6863 VOIDmode,
6864 (modifier == EXPAND_INITIALIZER
6865 || modifier == EXPAND_CONST_ADDRESS)
6866 ? modifier : EXPAND_NORMAL);
6868 /* If this is a constant, put it into a register if it is a
6869 legitimate constant and OFFSET is 0 and memory if it isn't. */
6870 if (CONSTANT_P (op0))
6872 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6873 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6874 && offset == 0)
6875 op0 = force_reg (mode, op0);
6876 else
6877 op0 = validize_mem (force_const_mem (mode, op0));
6880 if (offset != 0)
6882 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6884 /* If this object is in memory, put it into a register.
6885 This case can't occur in C, but can in Ada if we have
6886 unchecked conversion of an expression from a scalar type to
6887 an array or record type. */
6888 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6889 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6891 tree nt = build_qualified_type (TREE_TYPE (tem),
6892 (TYPE_QUALS (TREE_TYPE (tem))
6893 | TYPE_QUAL_CONST));
6894 rtx memloc = assign_temp (nt, 1, 1, 1);
6896 mark_temp_addr_taken (memloc);
6897 emit_move_insn (memloc, op0);
6898 op0 = memloc;
6901 if (GET_CODE (op0) != MEM)
6902 abort ();
6904 if (GET_MODE (offset_rtx) != ptr_mode)
6906 #ifdef POINTERS_EXTEND_UNSIGNED
6907 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6908 #else
6909 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6910 #endif
6913 /* A constant address in OP0 can have VOIDmode; we must not try
6914 to call force_reg for that case. Avoid that case. */
6915 if (GET_CODE (op0) == MEM
6916 && GET_MODE (op0) == BLKmode
6917 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6918 && bitsize != 0
6919 && (bitpos % bitsize) == 0
6920 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6921 && alignment == GET_MODE_ALIGNMENT (mode1))
6923 rtx temp = change_address (op0, mode1,
6924 plus_constant (XEXP (op0, 0),
6925 (bitpos /
6926 BITS_PER_UNIT)));
6927 if (GET_CODE (XEXP (temp, 0)) == REG)
6928 op0 = temp;
6929 else
6930 op0 = change_address (op0, mode1,
6931 force_reg (GET_MODE (XEXP (temp, 0)),
6932 XEXP (temp, 0)));
6933 bitpos = 0;
6936 op0 = change_address (op0, VOIDmode,
6937 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6938 force_reg (ptr_mode,
6939 offset_rtx)));
6942 /* Don't forget about volatility even if this is a bitfield. */
6943 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6945 op0 = copy_rtx (op0);
6946 MEM_VOLATILE_P (op0) = 1;
6949 /* Check the access. */
6950 if (cfun != 0 && current_function_check_memory_usage
6951 && GET_CODE (op0) == MEM)
6953 enum memory_use_mode memory_usage;
6954 memory_usage = get_memory_usage_from_modifier (modifier);
6956 if (memory_usage != MEMORY_USE_DONT)
6958 rtx to;
6959 int size;
6961 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6962 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6964 /* Check the access right of the pointer. */
6965 in_check_memory_usage = 1;
6966 if (size > BITS_PER_UNIT)
6967 emit_library_call (chkr_check_addr_libfunc,
6968 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6969 Pmode, GEN_INT (size / BITS_PER_UNIT),
6970 TYPE_MODE (sizetype),
6971 GEN_INT (memory_usage),
6972 TYPE_MODE (integer_type_node));
6973 in_check_memory_usage = 0;
6977 /* In cases where an aligned union has an unaligned object
6978 as a field, we might be extracting a BLKmode value from
6979 an integer-mode (e.g., SImode) object. Handle this case
6980 by doing the extract into an object as wide as the field
6981 (which we know to be the width of a basic mode), then
6982 storing into memory, and changing the mode to BLKmode.
6983 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6984 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6985 if (mode1 == VOIDmode
6986 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6987 || (modifier != EXPAND_CONST_ADDRESS
6988 && modifier != EXPAND_INITIALIZER
6989 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6990 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6991 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6992 /* If the field isn't aligned enough to fetch as a memref,
6993 fetch it as a bit field. */
6994 || (mode1 != BLKmode
6995 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6996 && ((TYPE_ALIGN (TREE_TYPE (tem))
6997 < GET_MODE_ALIGNMENT (mode))
6998 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6999 /* If the type and the field are a constant size and the
7000 size of the type isn't the same size as the bitfield,
7001 we must use bitfield operations. */
7002 || ((bitsize >= 0
7003 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7004 == INTEGER_CST)
7005 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7006 bitsize)))))
7007 || (modifier != EXPAND_CONST_ADDRESS
7008 && modifier != EXPAND_INITIALIZER
7009 && mode == BLKmode
7010 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7011 && (TYPE_ALIGN (type) > alignment
7012 || bitpos % TYPE_ALIGN (type) != 0)))
7014 enum machine_mode ext_mode = mode;
7016 if (ext_mode == BLKmode
7017 && ! (target != 0 && GET_CODE (op0) == MEM
7018 && GET_CODE (target) == MEM
7019 && bitpos % BITS_PER_UNIT == 0))
7020 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7022 if (ext_mode == BLKmode)
7024 /* In this case, BITPOS must start at a byte boundary and
7025 TARGET, if specified, must be a MEM. */
7026 if (GET_CODE (op0) != MEM
7027 || (target != 0 && GET_CODE (target) != MEM)
7028 || bitpos % BITS_PER_UNIT != 0)
7029 abort ();
7031 op0 = change_address (op0, VOIDmode,
7032 plus_constant (XEXP (op0, 0),
7033 bitpos / BITS_PER_UNIT));
7034 if (target == 0)
7035 target = assign_temp (type, 0, 1, 1);
7037 emit_block_move (target, op0,
7038 bitsize == -1 ? expr_size (exp)
7039 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7040 / BITS_PER_UNIT),
7041 BITS_PER_UNIT);
7043 return target;
7046 op0 = validize_mem (op0);
7048 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7049 mark_reg_pointer (XEXP (op0, 0), alignment);
7051 op0 = extract_bit_field (op0, bitsize, bitpos,
7052 unsignedp, target, ext_mode, ext_mode,
7053 alignment,
7054 int_size_in_bytes (TREE_TYPE (tem)));
7056 /* If the result is a record type and BITSIZE is narrower than
7057 the mode of OP0, an integral mode, and this is a big endian
7058 machine, we must put the field into the high-order bits. */
7059 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7060 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7061 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7062 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7063 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7064 - bitsize),
7065 op0, 1);
7067 if (mode == BLKmode)
7069 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7070 TYPE_QUAL_CONST);
7071 rtx new = assign_temp (nt, 0, 1, 1);
7073 emit_move_insn (new, op0);
7074 op0 = copy_rtx (new);
7075 PUT_MODE (op0, BLKmode);
7078 return op0;
7081 /* If the result is BLKmode, use that to access the object
7082 now as well. */
7083 if (mode == BLKmode)
7084 mode1 = BLKmode;
7086 /* Get a reference to just this component. */
7087 if (modifier == EXPAND_CONST_ADDRESS
7088 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7090 rtx new = gen_rtx_MEM (mode1,
7091 plus_constant (XEXP (op0, 0),
7092 (bitpos / BITS_PER_UNIT)));
7094 MEM_COPY_ATTRIBUTES (new, op0);
7095 op0 = new;
7097 else
7098 op0 = change_address (op0, mode1,
7099 plus_constant (XEXP (op0, 0),
7100 (bitpos / BITS_PER_UNIT)));
7102 set_mem_attributes (op0, exp, 0);
7103 if (GET_CODE (XEXP (op0, 0)) == REG)
7104 mark_reg_pointer (XEXP (op0, 0), alignment);
7106 MEM_VOLATILE_P (op0) |= volatilep;
7107 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7108 || modifier == EXPAND_CONST_ADDRESS
7109 || modifier == EXPAND_INITIALIZER)
7110 return op0;
7111 else if (target == 0)
7112 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7114 convert_move (target, op0, unsignedp);
7115 return target;
7118 /* Intended for a reference to a buffer of a file-object in Pascal.
7119 But it's not certain that a special tree code will really be
7120 necessary for these. INDIRECT_REF might work for them. */
7121 case BUFFER_REF:
7122 abort ();
7124 case IN_EXPR:
7126 /* Pascal set IN expression.
7128 Algorithm:
7129 rlo = set_low - (set_low%bits_per_word);
7130 the_word = set [ (index - rlo)/bits_per_word ];
7131 bit_index = index % bits_per_word;
7132 bitmask = 1 << bit_index;
7133 return !!(the_word & bitmask); */
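/* A worked instance of the algorithm above (illustrative numbers only):
   with set_low = 0, index = 10 and 8-bit units, rlo is 0, the byte
   examined is set[(10 - 0) / 8] = set[1], bit_index is 10 % 8 = 2, and
   the result is bit 2 of that byte.  */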
7135 tree set = TREE_OPERAND (exp, 0);
7136 tree index = TREE_OPERAND (exp, 1);
7137 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7138 tree set_type = TREE_TYPE (set);
7139 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7140 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7141 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7142 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7143 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7144 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7145 rtx setaddr = XEXP (setval, 0);
7146 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7147 rtx rlow;
7148 rtx diff, quo, rem, addr, bit, result;
7150 /* If domain is empty, answer is no. Likewise if index is constant
7151 and out of bounds. */
7152 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7153 && TREE_CODE (set_low_bound) == INTEGER_CST
7154 && tree_int_cst_lt (set_high_bound, set_low_bound))
7155 || (TREE_CODE (index) == INTEGER_CST
7156 && TREE_CODE (set_low_bound) == INTEGER_CST
7157 && tree_int_cst_lt (index, set_low_bound))
7158 || (TREE_CODE (set_high_bound) == INTEGER_CST
7159 && TREE_CODE (index) == INTEGER_CST
7160 && tree_int_cst_lt (set_high_bound, index))))
7161 return const0_rtx;
7163 if (target == 0)
7164 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7166 /* If we get here, we have to generate the code for both cases
7167 (in range and out of range). */
7169 op0 = gen_label_rtx ();
7170 op1 = gen_label_rtx ();
7172 if (! (GET_CODE (index_val) == CONST_INT
7173 && GET_CODE (lo_r) == CONST_INT))
7175 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7176 GET_MODE (index_val), iunsignedp, 0, op1);
7179 if (! (GET_CODE (index_val) == CONST_INT
7180 && GET_CODE (hi_r) == CONST_INT))
7182 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7183 GET_MODE (index_val), iunsignedp, 0, op1);
7186 /* Calculate the element number of bit zero in the first word
7187 of the set. */
7188 if (GET_CODE (lo_r) == CONST_INT)
7189 rlow = GEN_INT (INTVAL (lo_r)
7190 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7191 else
7192 rlow = expand_binop (index_mode, and_optab, lo_r,
7193 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7194 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7196 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7197 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7199 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7200 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7201 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7202 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7204 addr = memory_address (byte_mode,
7205 expand_binop (index_mode, add_optab, diff,
7206 setaddr, NULL_RTX, iunsignedp,
7207 OPTAB_LIB_WIDEN));
7209 /* Extract the bit we want to examine. */
7210 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7211 gen_rtx_MEM (byte_mode, addr),
7212 make_tree (TREE_TYPE (index), rem),
7213 NULL_RTX, 1);
7214 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7215 GET_MODE (target) == byte_mode ? target : 0,
7216 1, OPTAB_LIB_WIDEN);
7218 if (result != target)
7219 convert_move (target, result, 1);
7221 /* Output the code to handle the out-of-range case. */
7222 emit_jump (op0);
7223 emit_label (op1);
7224 emit_move_insn (target, const0_rtx);
7225 emit_label (op0);
7226 return target;
7229 case WITH_CLEANUP_EXPR:
7230 if (RTL_EXPR_RTL (exp) == 0)
7232 RTL_EXPR_RTL (exp)
7233 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7234 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7236 /* That's it for this cleanup. */
7237 TREE_OPERAND (exp, 2) = 0;
7239 return RTL_EXPR_RTL (exp);
7241 case CLEANUP_POINT_EXPR:
7243 /* Start a new binding layer that will keep track of all cleanup
7244 actions to be performed. */
7245 expand_start_bindings (2);
7247 target_temp_slot_level = temp_slot_level;
7249 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7250 /* If we're going to use this value, load it up now. */
7251 if (! ignore)
7252 op0 = force_not_mem (op0);
7253 preserve_temp_slots (op0);
7254 expand_end_bindings (NULL_TREE, 0, 0);
7256 return op0;
7258 case CALL_EXPR:
7259 /* Check for a built-in function. */
7260 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7261 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7262 == FUNCTION_DECL)
7263 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7265 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7266 == BUILT_IN_FRONTEND)
7267 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7268 else
7269 return expand_builtin (exp, target, subtarget, tmode, ignore);
7272 return expand_call (exp, target, ignore);
7274 case NON_LVALUE_EXPR:
7275 case NOP_EXPR:
7276 case CONVERT_EXPR:
7277 case REFERENCE_EXPR:
7278 if (TREE_OPERAND (exp, 0) == error_mark_node)
7279 return const0_rtx;
7281 if (TREE_CODE (type) == UNION_TYPE)
7283 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7285 /* If both input and output are BLKmode, this conversion
7286 isn't actually doing anything unless we need to make the
7287 alignment stricter. */
7288 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7289 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7290 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7291 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7292 modifier);
7294 if (target == 0)
7295 target = assign_temp (type, 0, 1, 1);
7297 if (GET_CODE (target) == MEM)
7298 /* Store data into beginning of memory target. */
7299 store_expr (TREE_OPERAND (exp, 0),
7300 change_address (target, TYPE_MODE (valtype), 0), 0);
7302 else if (GET_CODE (target) == REG)
7303 /* Store this field into a union of the proper type. */
7304 store_field (target,
7305 MIN ((int_size_in_bytes (TREE_TYPE
7306 (TREE_OPERAND (exp, 0)))
7307 * BITS_PER_UNIT),
7308 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7309 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7310 VOIDmode, 0, BITS_PER_UNIT,
7311 int_size_in_bytes (type), 0);
7312 else
7313 abort ();
7315 /* Return the entire union. */
7316 return target;
7319 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7321 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7322 ro_modifier);
7324 /* If the signedness of the conversion differs and OP0 is
7325 a promoted SUBREG, clear that indication since we now
7326 have to do the proper extension. */
7327 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7328 && GET_CODE (op0) == SUBREG)
7329 SUBREG_PROMOTED_VAR_P (op0) = 0;
7331 return op0;
7334 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7335 if (GET_MODE (op0) == mode)
7336 return op0;
7338 /* If OP0 is a constant, just convert it into the proper mode. */
7339 if (CONSTANT_P (op0))
7340 return
7341 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7342 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7344 if (modifier == EXPAND_INITIALIZER)
7345 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7347 if (target == 0)
7348 return
7349 convert_to_mode (mode, op0,
7350 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7351 else
7352 convert_move (target, op0,
7353 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7354 return target;
7356 case PLUS_EXPR:
7357 /* We come here from MINUS_EXPR when the second operand is a
7358 constant. */
7359 plus_expr:
7360 this_optab = ! unsignedp && flag_trapv
7361 && (GET_MODE_CLASS(mode) == MODE_INT)
7362 ? addv_optab : add_optab;
7364 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7365 something else, make sure we add the register to the constant and
7366 then to the other thing. This case can occur during strength
7367 reduction and doing it this way will produce better code if the
7368 frame pointer or argument pointer is eliminated.
7370 fold-const.c will ensure that the constant is always in the inner
7371 PLUS_EXPR, so the only case we need to do anything about is if
7372 sp, ap, or fp is our second argument, in which case we must swap
7373 the innermost first argument and our second argument. */
7375 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7376 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7377 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7378 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7379 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7380 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7382 tree t = TREE_OPERAND (exp, 1);
7384 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7385 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7388 /* If the result is to be ptr_mode and we are adding an integer to
7389 something, we might be forming a constant. So try to use
7390 plus_constant. If it produces a sum and we can't accept it,
7391 use force_operand. This allows P = &ARR[const] to generate
7392 efficient code on machines where a SYMBOL_REF is not a valid
7393 address.
7395 If this is an EXPAND_SUM call, always return the sum. */
7396 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7397 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7399 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7400 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7401 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7403 rtx constant_part;
7405 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7406 EXPAND_SUM);
7407 /* Use immed_double_const to ensure that the constant is
7408 truncated according to the mode of OP1, then sign extended
7409 to a HOST_WIDE_INT. Using the constant directly can result
7410 in non-canonical RTL in a 64x32 cross compile. */
7411 constant_part
7412 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7413 (HOST_WIDE_INT) 0,
7414 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7415 op1 = plus_constant (op1, INTVAL (constant_part));
7416 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7417 op1 = force_operand (op1, target);
7418 return op1;
7421 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7422 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7423 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7425 rtx constant_part;
7427 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7428 EXPAND_SUM);
7429 if (! CONSTANT_P (op0))
7431 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7432 VOIDmode, modifier);
7433 /* Don't go to both_summands if modifier
7434 says it's not right to return a PLUS. */
7435 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7436 goto binop2;
7437 goto both_summands;
7439 /* Use immed_double_const to ensure that the constant is
7440 truncated according to the mode of OP0, then sign extended
7441 to a HOST_WIDE_INT. Using the constant directly can result
7442 in non-canonical RTL in a 64x32 cross compile. */
7443 constant_part
7444 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7445 (HOST_WIDE_INT) 0,
7446 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7447 op0 = plus_constant (op0, INTVAL (constant_part));
7448 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7449 op0 = force_operand (op0, target);
7450 return op0;
7454 /* No sense saving up arithmetic to be done
7455 if it's all in the wrong mode to form part of an address.
7456 And force_operand won't know whether to sign-extend or
7457 zero-extend. */
7458 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7459 || mode != ptr_mode)
7460 goto binop;
7462 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7463 subtarget = 0;
7465 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7466 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7468 both_summands:
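/* For illustration: the canonicalization below turns a sum such as
   (x + 8) + (y + 4) into roughly (plus (plus x y) 12) -- the constants
   folded into a single trailing term and any MULT term placed first,
   the shape the address recognizers expect.  */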
7469 /* Make sure any term that's a sum with a constant comes last. */
7470 if (GET_CODE (op0) == PLUS
7471 && CONSTANT_P (XEXP (op0, 1)))
7473 temp = op0;
7474 op0 = op1;
7475 op1 = temp;
7477 /* If adding to a sum including a constant,
7478 associate it to put the constant outside. */
7479 if (GET_CODE (op1) == PLUS
7480 && CONSTANT_P (XEXP (op1, 1)))
7482 rtx constant_term = const0_rtx;
7484 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7485 if (temp != 0)
7486 op0 = temp;
7487 /* Ensure that MULT comes first if there is one. */
7488 else if (GET_CODE (op0) == MULT)
7489 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7490 else
7491 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7493 /* Let's also eliminate constants from op0 if possible. */
7494 op0 = eliminate_constant_term (op0, &constant_term);
7496 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7497 their sum should be a constant. Form it into OP1, since the
7498 result we want will then be OP0 + OP1. */
7500 temp = simplify_binary_operation (PLUS, mode, constant_term,
7501 XEXP (op1, 1));
7502 if (temp != 0)
7503 op1 = temp;
7504 else
7505 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7508 /* Put a constant term last and put a multiplication first. */
7509 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7510 temp = op1, op1 = op0, op0 = temp;
7512 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7513 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7515 case MINUS_EXPR:
7516 /* For initializers, we are allowed to return a MINUS of two
7517 symbolic constants. Here we handle all cases when both operands
7518 are constant. */
7519 /* Handle difference of two symbolic constants,
7520 for the sake of an initializer. */
7521 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7522 && really_constant_p (TREE_OPERAND (exp, 0))
7523 && really_constant_p (TREE_OPERAND (exp, 1)))
7525 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7526 VOIDmode, ro_modifier);
7527 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7528 VOIDmode, ro_modifier);
7530 /* If the last operand is a CONST_INT, use plus_constant of
7531 the negated constant. Else make the MINUS. */
7532 if (GET_CODE (op1) == CONST_INT)
7533 return plus_constant (op0, - INTVAL (op1));
7534 else
7535 return gen_rtx_MINUS (mode, op0, op1);
7537 /* Convert A - const to A + (-const). */
7538 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7540 tree negated = fold (build1 (NEGATE_EXPR, type,
7541 TREE_OPERAND (exp, 1)));
7543 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7544 /* If we can't negate the constant in TYPE, leave it alone and
7545 expand_binop will negate it for us. We used to try to do it
7546 here in the signed version of TYPE, but that doesn't work
7547 on POINTER_TYPEs. */;
7548 else
7550 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7551 goto plus_expr;
7554 this_optab = ! unsignedp && flag_trapv
7555 && (GET_MODE_CLASS(mode) == MODE_INT)
7556 ? subv_optab : sub_optab;
7557 goto binop;
7559 case MULT_EXPR:
7560 /* If first operand is constant, swap them.
7561 Thus the following special case checks need only
7562 check the second operand. */
7563 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7565 register tree t1 = TREE_OPERAND (exp, 0);
7566 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7567 TREE_OPERAND (exp, 1) = t1;
7570 /* Attempt to return something suitable for generating an
7571 indexed address, for machines that support that. */
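/* For illustration: for &a[i] expanded with EXPAND_SUM, the index
   computation i * elt_size is returned below as (mult i-reg elt-size)
   so the caller can fold it into an indexed address instead of
   emitting a separate multiply.  */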
7573 if (modifier == EXPAND_SUM && mode == ptr_mode
7574 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7575 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7577 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7578 EXPAND_SUM);
7580 /* Apply distributive law if OP0 is x+c. */
7581 if (GET_CODE (op0) == PLUS
7582 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7583 return
7584 gen_rtx_PLUS
7585 (mode,
7586 gen_rtx_MULT
7587 (mode, XEXP (op0, 0),
7588 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7589 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7590 * INTVAL (XEXP (op0, 1))));
7592 if (GET_CODE (op0) != REG)
7593 op0 = force_operand (op0, NULL_RTX);
7594 if (GET_CODE (op0) != REG)
7595 op0 = copy_to_mode_reg (mode, op0);
7597 return
7598 gen_rtx_MULT (mode, op0,
7599 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7602 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7603 subtarget = 0;
7605 /* Check for multiplying things that have been extended
7606 from a narrower type. If this machine supports multiplying
7607 in that narrower type with a result in the desired type,
7608 do it that way, and avoid the explicit type-conversion. */
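/* For example, on a 32-bit target (long long) a * (long long) b, with
   a and b of type int, can be done with one 32x32->64 widening multiply
   instead of first widening both operands to DImode.  */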
7609 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7610 && TREE_CODE (type) == INTEGER_TYPE
7611 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7612 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7613 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7614 && int_fits_type_p (TREE_OPERAND (exp, 1),
7615 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7616 /* Don't use a widening multiply if a shift will do. */
7617 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7618 > HOST_BITS_PER_WIDE_INT)
7619 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7620 ||
7621 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7623 ==
7624 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7625 /* If both operands are extended, they must either both
7626 be zero-extended or both be sign-extended. */
7627 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7628 ==
7629 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7631 enum machine_mode innermode
7632 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7633 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7634 ? smul_widen_optab : umul_widen_optab);
7635 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7636 ? umul_widen_optab : smul_widen_optab);
7637 if (mode == GET_MODE_WIDER_MODE (innermode))
7639 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7641 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7642 NULL_RTX, VOIDmode, 0);
7643 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7644 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7645 VOIDmode, 0);
7646 else
7647 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7648 NULL_RTX, VOIDmode, 0);
7649 goto binop2;
7651 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7652 && innermode == word_mode)
7654 rtx htem;
7655 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7656 NULL_RTX, VOIDmode, 0);
7657 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7658 op1 = convert_modes (innermode, mode,
7659 expand_expr (TREE_OPERAND (exp, 1),
7660 NULL_RTX, VOIDmode, 0),
7661 unsignedp);
7662 else
7663 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7664 NULL_RTX, VOIDmode, 0);
7665 temp = expand_binop (mode, other_optab, op0, op1, target,
7666 unsignedp, OPTAB_LIB_WIDEN);
7667 htem = expand_mult_highpart_adjust (innermode,
7668 gen_highpart (innermode, temp),
7669 op0, op1,
7670 gen_highpart (innermode, temp),
7671 unsignedp);
7672 emit_move_insn (gen_highpart (innermode, temp), htem);
7673 return temp;
7677 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7678 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7679 return expand_mult (mode, op0, op1, target, unsignedp);
7681 case TRUNC_DIV_EXPR:
7682 case FLOOR_DIV_EXPR:
7683 case CEIL_DIV_EXPR:
7684 case ROUND_DIV_EXPR:
7685 case EXACT_DIV_EXPR:
7686 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7687 subtarget = 0;
7688 /* Possible optimization: compute the dividend with EXPAND_SUM
7689 then if the divisor is constant can optimize the case
7690 where some terms of the dividend have coeffs divisible by it. */
7691 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7692 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7693 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7695 case RDIV_EXPR:
7696 this_optab = flodiv_optab;
7697 goto binop;
7699 case TRUNC_MOD_EXPR:
7700 case FLOOR_MOD_EXPR:
7701 case CEIL_MOD_EXPR:
7702 case ROUND_MOD_EXPR:
7703 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7704 subtarget = 0;
7705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7706 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7707 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7709 case FIX_ROUND_EXPR:
7710 case FIX_FLOOR_EXPR:
7711 case FIX_CEIL_EXPR:
7712 abort (); /* Not used for C. */
7714 case FIX_TRUNC_EXPR:
7715 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7716 if (target == 0)
7717 target = gen_reg_rtx (mode);
7718 expand_fix (target, op0, unsignedp);
7719 return target;
7721 case FLOAT_EXPR:
7722 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7723 if (target == 0)
7724 target = gen_reg_rtx (mode);
7725 /* expand_float can't figure out what to do if FROM has VOIDmode.
7726 So give it the correct mode. With -O, cse will optimize this. */
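/* e.g. a CONST_INT such as (const_int 3) carries no machine mode, so
   it is first copied into a register of the operand's type mode before
   the integer-to-float conversion is emitted.  */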
7727 if (GET_MODE (op0) == VOIDmode)
7728 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7729 op0);
7730 expand_float (target, op0,
7731 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7732 return target;
7734 case NEGATE_EXPR:
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7736 temp = expand_unop (mode,
7737 ! unsignedp && flag_trapv
7738 && (GET_MODE_CLASS(mode) == MODE_INT)
7739 ? negv_optab : neg_optab, op0, target, 0);
7740 if (temp == 0)
7741 abort ();
7742 return temp;
7744 case ABS_EXPR:
7745 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7747 /* Handle complex values specially. */
7748 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7749 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7750 return expand_complex_abs (mode, op0, target, unsignedp);
7752 /* Unsigned abs is simply the operand. Testing here means we don't
7753 risk generating incorrect code below. */
7754 if (TREE_UNSIGNED (type))
7755 return op0;
7757 return expand_abs (mode, op0, target, unsignedp,
7758 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7760 case MAX_EXPR:
7761 case MIN_EXPR:
7762 target = original_target;
7763 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7764 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7765 || GET_MODE (target) != mode
7766 || (GET_CODE (target) == REG
7767 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7768 target = gen_reg_rtx (mode);
7769 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7770 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7772 /* First try to do it with a special MIN or MAX instruction.
7773 If that does not win, use a conditional jump to select the proper
7774 value. */
7775 this_optab = (TREE_UNSIGNED (type)
7776 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7777 : (code == MIN_EXPR ? smin_optab : smax_optab));
7779 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7780 OPTAB_WIDEN);
7781 if (temp != 0)
7782 return temp;
7784 /* At this point, a MEM target is no longer useful; we will get better
7785 code without it. */
7787 if (GET_CODE (target) == MEM)
7788 target = gen_reg_rtx (mode);
7790 if (target != op0)
7791 emit_move_insn (target, op0);
7793 op0 = gen_label_rtx ();
7795 /* If this mode is an integer too wide to compare properly,
7796 compare word by word. Rely on cse to optimize constant cases. */
7797 if (GET_MODE_CLASS (mode) == MODE_INT
7798 && ! can_compare_p (GE, mode, ccp_jump))
7800 if (code == MAX_EXPR)
7801 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7802 target, op1, NULL_RTX, op0);
7803 else
7804 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7805 op1, target, NULL_RTX, op0);
7807 else
7809 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7810 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7811 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7812 op0);
7814 emit_move_insn (target, op1);
7815 emit_label (op0);
7816 return target;
7818 case BIT_NOT_EXPR:
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7820 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7821 if (temp == 0)
7822 abort ();
7823 return temp;
7825 case FFS_EXPR:
7826 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7827 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7828 if (temp == 0)
7829 abort ();
7830 return temp;
7832 /* ??? Can optimize bitwise operations with one arg constant.
7833 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7834 and (a bitwise1 b) bitwise2 b (etc)
7835 but that is probably not worth while. */
7837 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7838 boolean values when we want in all cases to compute both of them. In
7839 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7840 as actual zero-or-1 values and then bitwise anding. In cases where
7841 there cannot be any side effects, better code would be made by
7842 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7843 how to recognize those cases. */
7845 case TRUTH_AND_EXPR:
7846 case BIT_AND_EXPR:
7847 this_optab = and_optab;
7848 goto binop;
7850 case TRUTH_OR_EXPR:
7851 case BIT_IOR_EXPR:
7852 this_optab = ior_optab;
7853 goto binop;
7855 case TRUTH_XOR_EXPR:
7856 case BIT_XOR_EXPR:
7857 this_optab = xor_optab;
7858 goto binop;
7860 case LSHIFT_EXPR:
7861 case RSHIFT_EXPR:
7862 case LROTATE_EXPR:
7863 case RROTATE_EXPR:
7864 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7865 subtarget = 0;
7866 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7867 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7868 unsignedp);
7870 /* Could determine the answer when only additive constants differ. Also,
7871 the addition of one can be handled by changing the condition. */
7872 case LT_EXPR:
7873 case LE_EXPR:
7874 case GT_EXPR:
7875 case GE_EXPR:
7876 case EQ_EXPR:
7877 case NE_EXPR:
7878 case UNORDERED_EXPR:
7879 case ORDERED_EXPR:
7880 case UNLT_EXPR:
7881 case UNLE_EXPR:
7882 case UNGT_EXPR:
7883 case UNGE_EXPR:
7884 case UNEQ_EXPR:
7885 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7886 if (temp != 0)
7887 return temp;
7889 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7890 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7891 && original_target
7892 && GET_CODE (original_target) == REG
7893 && (GET_MODE (original_target)
7894 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7896 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7897 VOIDmode, 0);
7899 if (temp != original_target)
7900 temp = copy_to_reg (temp);
7902 op1 = gen_label_rtx ();
7903 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7904 GET_MODE (temp), unsignedp, 0, op1);
7905 emit_move_insn (temp, const1_rtx);
7906 emit_label (op1);
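/* TEMP now holds 0 if the operand was zero (the store of 1 above was
   branched around) and 1 otherwise, i.e. the value of foo != 0.  */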
7907 return temp;
7910 /* If no set-flag instruction, must generate a conditional
7911 store into a temporary variable. Drop through
7912 and handle this like && and ||. */
7914 case TRUTH_ANDIF_EXPR:
7915 case TRUTH_ORIF_EXPR:
7916 if (! ignore
7917 && (target == 0 || ! safe_from_p (target, exp, 1)
7918 /* Make sure we don't have a hard reg (such as function's return
7919 value) live across basic blocks, if not optimizing. */
7920 || (!optimize && GET_CODE (target) == REG
7921 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7922 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7924 if (target)
7925 emit_clr_insn (target);
7927 op1 = gen_label_rtx ();
7928 jumpifnot (exp, op1);
7930 if (target)
7931 emit_0_to_1_insn (target);
7933 emit_label (op1);
7934 return ignore ? const0_rtx : target;
7936 case TRUTH_NOT_EXPR:
7937 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7938 /* The parser is careful to generate TRUTH_NOT_EXPR
7939 only with operands that are always zero or one. */
7940 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7941 target, 1, OPTAB_LIB_WIDEN);
7942 if (temp == 0)
7943 abort ();
7944 return temp;
7946 case COMPOUND_EXPR:
7947 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7948 emit_queue ();
7949 return expand_expr (TREE_OPERAND (exp, 1),
7950 (ignore ? const0_rtx : target),
7951 VOIDmode, 0);
7953 case COND_EXPR:
7954 /* If we would have a "singleton" (see below) were it not for a
7955 conversion in each arm, bring that conversion back out. */
7956 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7957 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7958 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7959 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7961 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7962 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7964 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7965 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7966 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7967 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7968 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7969 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7970 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7971 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7972 return expand_expr (build1 (NOP_EXPR, type,
7973 build (COND_EXPR, TREE_TYPE (iftrue),
7974 TREE_OPERAND (exp, 0),
7975 iftrue, iffalse)),
7976 target, tmode, modifier);
7980 /* Note that COND_EXPRs whose type is a structure or union
7981 are required to be constructed to contain assignments of
7982 a temporary variable, so that we can evaluate them here
7983 for side effect only. If type is void, we must do likewise. */
7985 /* If an arm of the branch requires a cleanup,
7986 only that cleanup is performed. */
7988 tree singleton = 0;
7989 tree binary_op = 0, unary_op = 0;
7991 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7992 convert it to our mode, if necessary. */
7993 if (integer_onep (TREE_OPERAND (exp, 1))
7994 && integer_zerop (TREE_OPERAND (exp, 2))
7995 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7997 if (ignore)
7999 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8000 ro_modifier);
8001 return const0_rtx;
8004 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8005 if (GET_MODE (op0) == mode)
8006 return op0;
8008 if (target == 0)
8009 target = gen_reg_rtx (mode);
8010 convert_move (target, op0, unsignedp);
8011 return target;
8014 /* Check for X ? A + B : A. If we have this, we can copy A to the
8015 output and conditionally add B. Similarly for unary operations.
8016 Don't do this if X has side-effects because those side effects
8017 might affect A or B and the "?" operation is a sequence point in
8018 ANSI. (operand_equal_p tests for side effects.) */
8020 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8021 && operand_equal_p (TREE_OPERAND (exp, 2),
8022 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8023 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8024 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8025 && operand_equal_p (TREE_OPERAND (exp, 1),
8026 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8027 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8028 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8029 && operand_equal_p (TREE_OPERAND (exp, 2),
8030 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8031 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8032 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8033 && operand_equal_p (TREE_OPERAND (exp, 1),
8034 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8035 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8037 /* If we are not to produce a result, we have no target. Otherwise,
8038 if a target was specified use it; it will not be used as an
8039 intermediate target unless it is safe. If no target, use a
8040 temporary. */
8042 if (ignore)
8043 temp = 0;
8044 else if (original_target
8045 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8046 || (singleton && GET_CODE (original_target) == REG
8047 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8048 && original_target == var_rtx (singleton)))
8049 && GET_MODE (original_target) == mode
8050 #ifdef HAVE_conditional_move
8051 && (! can_conditionally_move_p (mode)
8052 || GET_CODE (original_target) == REG
8053 || TREE_ADDRESSABLE (type))
8054 #endif
8055 && ! (GET_CODE (original_target) == MEM
8056 && MEM_VOLATILE_P (original_target)))
8057 temp = original_target;
8058 else if (TREE_ADDRESSABLE (type))
8059 abort ();
8060 else
8061 temp = assign_temp (type, 0, 0, 1);
8063 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8064 do the test of X as a store-flag operation, do this as
8065 A + ((X != 0) << log C). Similarly for other simple binary
8066 operators. Only do for C == 1 if BRANCH_COST is low. */
8067 if (temp && singleton && binary_op
8068 && (TREE_CODE (binary_op) == PLUS_EXPR
8069 || TREE_CODE (binary_op) == MINUS_EXPR
8070 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8071 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8072 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8073 : integer_onep (TREE_OPERAND (binary_op, 1)))
8074 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8076 rtx result;
8077 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8078 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8079 ? addv_optab : add_optab)
8080 : TREE_CODE (binary_op) == MINUS_EXPR
8081 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8082 ? subv_optab : sub_optab)
8083 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8084 : xor_optab);
8086 /* If we had X ? A : A + 1, do this as A + (X == 0).
8088 We have to invert the truth value here and then put it
8089 back later if do_store_flag fails. We cannot simply copy
8090 TREE_OPERAND (exp, 0) to another variable and modify that
8091 because invert_truthvalue can modify the tree pointed to
8092 by its argument. */
8093 if (singleton == TREE_OPERAND (exp, 1))
8094 TREE_OPERAND (exp, 0)
8095 = invert_truthvalue (TREE_OPERAND (exp, 0));
8097 result = do_store_flag (TREE_OPERAND (exp, 0),
8098 (safe_from_p (temp, singleton, 1)
8099 ? temp : NULL_RTX),
8100 mode, BRANCH_COST <= 1);
8102 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8103 result = expand_shift (LSHIFT_EXPR, mode, result,
8104 build_int_2 (tree_log2
8105 (TREE_OPERAND
8106 (binary_op, 1)),
8107 0),
8108 (safe_from_p (temp, singleton, 1)
8109 ? temp : NULL_RTX), 0);
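/* For illustration: for X ? A + 4 : A, RESULT at this point holds
   (X != 0) << 2; the expand_binop call below adds it to A, giving the
   conditional increment without a branch.  */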
8111 if (result)
8113 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8114 return expand_binop (mode, boptab, op1, result, temp,
8115 unsignedp, OPTAB_LIB_WIDEN);
8117 else if (singleton == TREE_OPERAND (exp, 1))
8118 TREE_OPERAND (exp, 0)
8119 = invert_truthvalue (TREE_OPERAND (exp, 0));
8122 do_pending_stack_adjust ();
8123 NO_DEFER_POP;
8124 op0 = gen_label_rtx ();
8126 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8128 if (temp != 0)
8130 /* If the target conflicts with the other operand of the
8131 binary op, we can't use it. Also, we can't use the target
8132 if it is a hard register, because evaluating the condition
8133 might clobber it. */
8134 if ((binary_op
8135 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8136 || (GET_CODE (temp) == REG
8137 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8138 temp = gen_reg_rtx (mode);
8139 store_expr (singleton, temp, 0);
8141 else
8142 expand_expr (singleton,
8143 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8144 if (singleton == TREE_OPERAND (exp, 1))
8145 jumpif (TREE_OPERAND (exp, 0), op0);
8146 else
8147 jumpifnot (TREE_OPERAND (exp, 0), op0);
8149 start_cleanup_deferral ();
8150 if (binary_op && temp == 0)
8151 /* Just touch the other operand. */
8152 expand_expr (TREE_OPERAND (binary_op, 1),
8153 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8154 else if (binary_op)
8155 store_expr (build (TREE_CODE (binary_op), type,
8156 make_tree (type, temp),
8157 TREE_OPERAND (binary_op, 1)),
8158 temp, 0);
8159 else
8160 store_expr (build1 (TREE_CODE (unary_op), type,
8161 make_tree (type, temp)),
8162 temp, 0);
8163 op1 = op0;
8165 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8166 comparison operator. If we have one of these cases, set the
8167 output to A, branch on A (cse will merge these two references),
8168 then set the output to FOO. */
8169 else if (temp
8170 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8171 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8172 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8173 TREE_OPERAND (exp, 1), 0)
8174 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8175 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8176 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8178 if (GET_CODE (temp) == REG
8179 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8180 temp = gen_reg_rtx (mode);
8181 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8182 jumpif (TREE_OPERAND (exp, 0), op0);
8184 start_cleanup_deferral ();
8185 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8186 op1 = op0;
8188 else if (temp
8189 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8190 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8191 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8192 TREE_OPERAND (exp, 2), 0)
8193 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8194 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8195 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8197 if (GET_CODE (temp) == REG
8198 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8199 temp = gen_reg_rtx (mode);
8200 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8201 jumpifnot (TREE_OPERAND (exp, 0), op0);
8203 start_cleanup_deferral ();
8204 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8205 op1 = op0;
8207 else
8209 op1 = gen_label_rtx ();
8210 jumpifnot (TREE_OPERAND (exp, 0), op0);
8212 start_cleanup_deferral ();
8214 /* One branch of the cond can be void, if it never returns. For
8215 example A ? throw : E */
8216 if (temp != 0
8217 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8218 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8219 else
8220 expand_expr (TREE_OPERAND (exp, 1),
8221 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8222 end_cleanup_deferral ();
8223 emit_queue ();
8224 emit_jump_insn (gen_jump (op1));
8225 emit_barrier ();
8226 emit_label (op0);
8227 start_cleanup_deferral ();
8228 if (temp != 0
8229 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8230 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8231 else
8232 expand_expr (TREE_OPERAND (exp, 2),
8233 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8236 end_cleanup_deferral ();
8238 emit_queue ();
8239 emit_label (op1);
8240 OK_DEFER_POP;
8242 return temp;
8245 case TARGET_EXPR:
8247 /* Something needs to be initialized, but we didn't know
8248 where that thing was when building the tree. For example,
8249 it could be the return value of a function, or a parameter
8250 to a function which is laid down on the stack, or a temporary
8251 variable which must be passed by reference.
8253 We guarantee that the expression will either be constructed
8254 or copied into our original target. */
8256 tree slot = TREE_OPERAND (exp, 0);
8257 tree cleanups = NULL_TREE;
8258 tree exp1;
8260 if (TREE_CODE (slot) != VAR_DECL)
8261 abort ();
8263 if (! ignore)
8264 target = original_target;
8266 /* Set this here so that if we get a target that refers to a
8267 register variable that's already been used, put_reg_into_stack
8268 knows that it should fix up those uses. */
8269 TREE_USED (slot) = 1;
8271 if (target == 0)
8273 if (DECL_RTL_SET_P (slot))
8275 target = DECL_RTL (slot);
8276 /* If we have already expanded the slot, don't do
8277 it again. (mrs) */
8278 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8279 return target;
8281 else
8283 target = assign_temp (type, 2, 0, 1);
8284 /* All temp slots at this level must not conflict. */
8285 preserve_temp_slots (target);
8286 SET_DECL_RTL (slot, target);
8287 if (TREE_ADDRESSABLE (slot))
8288 put_var_into_stack (slot);
8290 /* Since SLOT is not known to the called function
8291 to belong to its stack frame, we must build an explicit
8292 cleanup. This case occurs when we must build up a reference
8293 to pass the reference as an argument. In this case,
8294 it is very likely that such a reference need not be
8295 built here. */
8297 if (TREE_OPERAND (exp, 2) == 0)
8298 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8299 cleanups = TREE_OPERAND (exp, 2);
8302 else
8304 /* This case does occur when expanding a parameter which
8305 needs to be constructed on the stack. The target
8306 is the actual stack address that we want to initialize.
8307 The function we call will perform the cleanup in this case. */
8309 /* If we have already assigned it space, use that space,
8310 not the target that we were passed in, as our target
8311 parameter is only a hint. */
8312 if (DECL_RTL_SET_P (slot))
8314 target = DECL_RTL (slot);
8315 /* If we have already expanded the slot, don't do
8316 it again. (mrs) */
8317 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8318 return target;
8320 else
8322 SET_DECL_RTL (slot, target);
8323 /* If we must have an addressable slot, then make sure that
8324 the RTL that we just stored in slot is OK. */
8325 if (TREE_ADDRESSABLE (slot))
8326 put_var_into_stack (slot);
8330 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8331 /* Mark it as expanded. */
8332 TREE_OPERAND (exp, 1) = NULL_TREE;
8334 store_expr (exp1, target, 0);
8336 expand_decl_cleanup (NULL_TREE, cleanups);
8338 return target;
8341 case INIT_EXPR:
8343 tree lhs = TREE_OPERAND (exp, 0);
8344 tree rhs = TREE_OPERAND (exp, 1);
8345 tree noncopied_parts = 0;
8346 tree lhs_type = TREE_TYPE (lhs);
8348 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8349 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8350 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8351 TYPE_NONCOPIED_PARTS (lhs_type));
8352 while (noncopied_parts != 0)
8354 expand_assignment (TREE_VALUE (noncopied_parts),
8355 TREE_PURPOSE (noncopied_parts), 0, 0);
8356 noncopied_parts = TREE_CHAIN (noncopied_parts);
8358 return temp;
8361 case MODIFY_EXPR:
8363 /* If lhs is complex, expand calls in rhs before computing it.
8364 That's so we don't compute a pointer and save it over a call.
8365 If lhs is simple, compute it first so we can give it as a
8366 target if the rhs is just a call. This avoids an extra temp and copy
8367 and that prevents a partial-subsumption which makes bad code.
8368 Actually we could treat component_ref's of vars like vars. */
8370 tree lhs = TREE_OPERAND (exp, 0);
8371 tree rhs = TREE_OPERAND (exp, 1);
8372 tree noncopied_parts = 0;
8373 tree lhs_type = TREE_TYPE (lhs);
8375 temp = 0;
8377 /* Check for |= or &= of a bitfield of size one into another bitfield
8378 of size 1. In this case, (unless we need the result of the
8379 assignment) we can do this more efficiently with a
8380 test followed by an assignment, if necessary.
8382 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8383 things change so we do, this code should be enhanced to
8384 support it. */
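/* Illustration: with one-bit fields, s.a |= t.b becomes in effect
   "if (t.b) s.a = 1;" and s.a &= t.b becomes "if (!t.b) s.a = 0;",
   i.e. a test plus a conditional store of a constant.  */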
8385 if (ignore
8386 && TREE_CODE (lhs) == COMPONENT_REF
8387 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8388 || TREE_CODE (rhs) == BIT_AND_EXPR)
8389 && TREE_OPERAND (rhs, 0) == lhs
8390 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8391 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8392 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8394 rtx label = gen_label_rtx ();
8396 do_jump (TREE_OPERAND (rhs, 1),
8397 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8398 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8399 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8400 (TREE_CODE (rhs) == BIT_IOR_EXPR
8401 ? integer_one_node
8402 : integer_zero_node)),
8403 0, 0);
8404 do_pending_stack_adjust ();
8405 emit_label (label);
8406 return const0_rtx;
8409 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8410 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8411 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8412 TYPE_NONCOPIED_PARTS (lhs_type));
8414 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8415 while (noncopied_parts != 0)
8417 expand_assignment (TREE_PURPOSE (noncopied_parts),
8418 TREE_VALUE (noncopied_parts), 0, 0);
8419 noncopied_parts = TREE_CHAIN (noncopied_parts);
8421 return temp;
8424 case RETURN_EXPR:
8425 if (!TREE_OPERAND (exp, 0))
8426 expand_null_return ();
8427 else
8428 expand_return (TREE_OPERAND (exp, 0));
8429 return const0_rtx;
8431 case PREINCREMENT_EXPR:
8432 case PREDECREMENT_EXPR:
8433 return expand_increment (exp, 0, ignore);
8435 case POSTINCREMENT_EXPR:
8436 case POSTDECREMENT_EXPR:
8437 /* Faster to treat as pre-increment if result is not used. */
8438 return expand_increment (exp, ! ignore, ignore);
8440 case ADDR_EXPR:
8441 /* If nonzero, TEMP will be set to the address of something that might
8442 be a MEM corresponding to a stack slot. */
8443 temp = 0;
8445 /* Are we taking the address of a nested function? */
8446 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8447 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8448 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8449 && ! TREE_STATIC (exp))
8451 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8452 op0 = force_operand (op0, target);
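/* Background note: trampoline_address returns the address of a small
   stub, built at run time, that loads the nested function's static
   chain and then jumps to it, so the result can be used like any other
   function pointer.  */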
8454 /* If we are taking the address of something erroneous, just
8455 return a zero. */
8456 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8457 return const0_rtx;
8458 else
8460 /* We make sure to pass const0_rtx down if we came in with
8461 ignore set, to avoid doing the cleanups twice for something. */
8462 op0 = expand_expr (TREE_OPERAND (exp, 0),
8463 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8464 (modifier == EXPAND_INITIALIZER
8465 ? modifier : EXPAND_CONST_ADDRESS));
8467 /* If we are going to ignore the result, OP0 will have been set
8468 to const0_rtx, so just return it. Don't get confused and
8469 think we are taking the address of the constant. */
8470 if (ignore)
8471 return op0;
8473 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8474 clever and return a REG when given a MEM. */
8475 op0 = protect_from_queue (op0, 1);
8477 /* We would like the object in memory. If it is a constant, we can
8478 have it be statically allocated into memory. For a non-constant,
8479 we need to allocate some memory and store the value into it. */
8481 if (CONSTANT_P (op0))
8482 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8483 op0);
8484 else if (GET_CODE (op0) == MEM)
8486 mark_temp_addr_taken (op0);
8487 temp = XEXP (op0, 0);
8490 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8491 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8492 || GET_CODE (op0) == PARALLEL)
8494 /* If this object is in a register, it must not
8495 be BLKmode. */
8496 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8497 tree nt = build_qualified_type (inner_type,
8498 (TYPE_QUALS (inner_type)
8499 | TYPE_QUAL_CONST));
8500 rtx memloc = assign_temp (nt, 1, 1, 1);
8502 mark_temp_addr_taken (memloc);
8503 if (GET_CODE (op0) == PARALLEL)
8504 /* Handle calls that pass values in multiple non-contiguous
8505 locations. The Irix 6 ABI has examples of this. */
8506 emit_group_store (memloc, op0,
8507 int_size_in_bytes (inner_type),
8508 TYPE_ALIGN (inner_type));
8509 else
8510 emit_move_insn (memloc, op0);
8511 op0 = memloc;
8514 if (GET_CODE (op0) != MEM)
8515 abort ();
8517 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8519 temp = XEXP (op0, 0);
8520 #ifdef POINTERS_EXTEND_UNSIGNED
8521 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8522 && mode == ptr_mode)
8523 temp = convert_memory_address (ptr_mode, temp);
8524 #endif
8525 return temp;
8528 op0 = force_operand (XEXP (op0, 0), target);
8531 if (flag_force_addr && GET_CODE (op0) != REG)
8532 op0 = force_reg (Pmode, op0);
8534 if (GET_CODE (op0) == REG
8535 && ! REG_USERVAR_P (op0))
8536 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8538 /* If we might have had a temp slot, add an equivalent address
8539 for it. */
8540 if (temp != 0)
8541 update_temp_slot_address (temp, op0);
8543 #ifdef POINTERS_EXTEND_UNSIGNED
8544 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8545 && mode == ptr_mode)
8546 op0 = convert_memory_address (ptr_mode, op0);
8547 #endif
8549 return op0;
8551 case ENTRY_VALUE_EXPR:
8552 abort ();
8554 /* COMPLEX type for Extended Pascal & Fortran */
8555 case COMPLEX_EXPR:
8557 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8558 rtx insns;
8560 /* Get the rtx code of the operands. */
8561 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8562 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8564 if (! target)
8565 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8567 start_sequence ();
8569 /* Move the real (op0) and imaginary (op1) parts to their location. */
8570 emit_move_insn (gen_realpart (mode, target), op0);
8571 emit_move_insn (gen_imagpart (mode, target), op1);
8573 insns = get_insns ();
8574 end_sequence ();
8576 /* Complex construction should appear as a single unit. */
8577 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8578 each with a separate pseudo as destination.
8579 It's not correct for flow to treat them as a unit. */
8580 if (GET_CODE (target) != CONCAT)
8581 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8582 else
8583 emit_insns (insns);
8585 return target;
8588 case REALPART_EXPR:
8589 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8590 return gen_realpart (mode, op0);
8592 case IMAGPART_EXPR:
8593 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8594 return gen_imagpart (mode, op0);
8596 case CONJ_EXPR:
8598 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8599 rtx imag_t;
8600 rtx insns;
8602 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8604 if (! target)
8605 target = gen_reg_rtx (mode);
8607 start_sequence ();
8609 /* Store the realpart and the negated imagpart to target. */
8610 emit_move_insn (gen_realpart (partmode, target),
8611 gen_realpart (partmode, op0));
8613 imag_t = gen_imagpart (partmode, target);
8614 temp = expand_unop (partmode,
8615 ! unsignedp && flag_trapv
8616 && (GET_MODE_CLASS(partmode) == MODE_INT)
8617 ? negv_optab : neg_optab,
8618 gen_imagpart (partmode, op0), imag_t, 0);
8619 if (temp != imag_t)
8620 emit_move_insn (imag_t, temp);
8622 insns = get_insns ();
8623 end_sequence ();
8625 /* Conjugate should appear as a single unit.
8626 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8627 each with a separate pseudo as destination.
8628 It's not correct for flow to treat them as a unit. */
8629 if (GET_CODE (target) != CONCAT)
8630 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8631 else
8632 emit_insns (insns);
8634 return target;
8637 case TRY_CATCH_EXPR:
8639 tree handler = TREE_OPERAND (exp, 1);
8641 expand_eh_region_start ();
8643 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8645 expand_eh_region_end_cleanup (handler);
8647 return op0;
8650 case TRY_FINALLY_EXPR:
8652 tree try_block = TREE_OPERAND (exp, 0);
8653 tree finally_block = TREE_OPERAND (exp, 1);
8654 rtx finally_label = gen_label_rtx ();
8655 rtx done_label = gen_label_rtx ();
8656 rtx return_link = gen_reg_rtx (Pmode);
8657 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8658 (tree) finally_label, (tree) return_link);
8659 TREE_SIDE_EFFECTS (cleanup) = 1;
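/* The finally block is emitted only once, as a subroutine: normal exit
   from the try block runs the cleanup, which loads RETURN_LINK with a
   resume label and jumps to FINALLY_LABEL; the finally code returns
   through the indirect jump on RETURN_LINK, after which control resumes
   past the cleanup and jumps to DONE_LABEL.  */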
8661 /* Start a new binding layer that will keep track of all cleanup
8662 actions to be performed. */
8663 expand_start_bindings (2);
8665 target_temp_slot_level = temp_slot_level;
8667 expand_decl_cleanup (NULL_TREE, cleanup);
8668 op0 = expand_expr (try_block, target, tmode, modifier);
8670 preserve_temp_slots (op0);
8671 expand_end_bindings (NULL_TREE, 0, 0);
8672 emit_jump (done_label);
8673 emit_label (finally_label);
8674 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8675 emit_indirect_jump (return_link);
8676 emit_label (done_label);
8677 return op0;
8680 case GOTO_SUBROUTINE_EXPR:
8682 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8683 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8684 rtx return_address = gen_label_rtx ();
8685 emit_move_insn (return_link,
8686 gen_rtx_LABEL_REF (Pmode, return_address));
8687 emit_jump (subr);
8688 emit_label (return_address);
8689 return const0_rtx;
8692 case VA_ARG_EXPR:
8693 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8695 case EXC_PTR_EXPR:
8696 return get_exception_pointer (cfun);
8698 default:
8699 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8702 /* Here to do an ordinary binary operator, generating an instruction
8703 from the optab already placed in `this_optab'. */
8704 binop:
8705 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8706 subtarget = 0;
8707 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8708 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8709 binop2:
8710 temp = expand_binop (mode, this_optab, op0, op1, target,
8711 unsignedp, OPTAB_LIB_WIDEN);
8712 if (temp == 0)
8713 abort ();
8714 return temp;
8717 /* Similar to expand_expr, except that we don't specify a target, target
8718 mode, or modifier and we return the alignment of the inner type. This is
8719 used in cases where it is not necessary to align the result to the
8720 alignment of its type as long as we know the alignment of the result, for
8721 example for comparisons of BLKmode values. */
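/* For example, when comparing two BLKmode structure values, an operand
   that is an under-aligned field reference only needs its actual
   alignment, which is what *PALIGN reports back to the caller.  */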
8723 static rtx
8724 expand_expr_unaligned (exp, palign)
8725 register tree exp;
8726 unsigned int *palign;
8728 register rtx op0;
8729 tree type = TREE_TYPE (exp);
8730 register enum machine_mode mode = TYPE_MODE (type);
8732 /* Default the alignment we return to that of the type. */
8733 *palign = TYPE_ALIGN (type);
8735 /* The only case in which we do anything special is if the resulting mode
8736 is BLKmode. */
8737 if (mode != BLKmode)
8738 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8740 switch (TREE_CODE (exp))
8742 case CONVERT_EXPR:
8743 case NOP_EXPR:
8744 case NON_LVALUE_EXPR:
8745 /* Conversions between BLKmode values don't change the underlying
8746 alignment or value. */
8747 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8748 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8749 break;
8751 case ARRAY_REF:
8752 /* Much of the code for this case is copied directly from expand_expr.
8753 We need to duplicate it here because we will do something different
8754 in the fall-through case, so we need to handle the same exceptions
8755 it does. */
8757 tree array = TREE_OPERAND (exp, 0);
8758 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8759 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8760 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8761 HOST_WIDE_INT i;
8763 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8764 abort ();
8766 /* Optimize the special-case of a zero lower bound.
8768 We convert the low_bound to sizetype to avoid some problems
8769 with constant folding. (E.g. suppose the lower bound is 1,
8770 and its mode is QI. Without the conversion, (ARRAY
8771 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8772 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8774 if (! integer_zerop (low_bound))
8775 index = size_diffop (index, convert (sizetype, low_bound));
8777 /* If this is a constant index into a constant array,
8778 just get the value from the array. Handle both the cases when
8779 we have an explicit constructor and when our operand is a variable
8780 that was declared const. */
8782 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8783 && host_integerp (index, 0)
8784 && 0 > compare_tree_int (index,
8785 list_length (CONSTRUCTOR_ELTS
8786 (TREE_OPERAND (exp, 0)))))
8788 tree elem;
8790 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8791 i = tree_low_cst (index, 0);
8792 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8795 if (elem)
8796 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8799 else if (optimize >= 1
8800 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8801 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8802 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8804 if (TREE_CODE (index) == INTEGER_CST)
8806 tree init = DECL_INITIAL (array);
8808 if (TREE_CODE (init) == CONSTRUCTOR)
8810 tree elem;
8812 for (elem = CONSTRUCTOR_ELTS (init);
8813 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8814 elem = TREE_CHAIN (elem))
8817 if (elem)
8818 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8819 palign);
8824 /* Fall through. */
8826 case COMPONENT_REF:
8827 case BIT_FIELD_REF:
8828 /* If the operand is a CONSTRUCTOR, we can just extract the
8829 appropriate field if it is present. Don't do this if we have
8830 already written the data since we want to refer to that copy
8831 and varasm.c assumes that's what we'll do. */
8832 if (TREE_CODE (exp) != ARRAY_REF
8833 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8834 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8836 tree elt;
8838 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8839 elt = TREE_CHAIN (elt))
8840 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8841 /* Note that unlike the case in expand_expr, we know this is
8842 BLKmode and hence not an integer. */
8843 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8847 enum machine_mode mode1;
8848 HOST_WIDE_INT bitsize, bitpos;
8849 tree offset;
8850 int volatilep = 0;
8851 unsigned int alignment;
8852 int unsignedp;
8853 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8854 &mode1, &unsignedp, &volatilep,
8855 &alignment);
8857 /* If we got back the original object, something is wrong. Perhaps
8858 we are evaluating an expression too early. In any event, don't
8859 infinitely recurse. */
8860 if (tem == exp)
8861 abort ();
8863 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8865 /* If this is a constant, put it into a register if it is a
8866 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8867 if (CONSTANT_P (op0))
8869 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8871 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8872 && offset == 0)
8873 op0 = force_reg (inner_mode, op0);
8874 else
8875 op0 = validize_mem (force_const_mem (inner_mode, op0));
8878 if (offset != 0)
8880 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8882 /* If this object is in a register, put it into memory.
8883 This case can't occur in C, but can in Ada if we have
8884 unchecked conversion of an expression from a scalar type to
8885 an array or record type. */
8886 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8887 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8889 tree nt = build_qualified_type (TREE_TYPE (tem),
8890 (TYPE_QUALS (TREE_TYPE (tem))
8891 | TYPE_QUAL_CONST));
8892 rtx memloc = assign_temp (nt, 1, 1, 1);
8894 mark_temp_addr_taken (memloc);
8895 emit_move_insn (memloc, op0);
8896 op0 = memloc;
8899 if (GET_CODE (op0) != MEM)
8900 abort ();
8902 if (GET_MODE (offset_rtx) != ptr_mode)
8904 #ifdef POINTERS_EXTEND_UNSIGNED
8905 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8906 #else
8907 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8908 #endif
8911 op0 = change_address (op0, VOIDmode,
8912 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8913 force_reg (ptr_mode,
8914 offset_rtx)));
8917 /* Don't forget about volatility even if this is a bitfield. */
8918 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8920 op0 = copy_rtx (op0);
8921 MEM_VOLATILE_P (op0) = 1;
8924 /* Check the access. */
8925 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8927 rtx to;
8928 int size;
8930 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8931 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8933 /* Check the access right of the pointer. */
8934 in_check_memory_usage = 1;
8935 if (size > BITS_PER_UNIT)
8936 emit_library_call (chkr_check_addr_libfunc,
8937 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8938 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8939 TYPE_MODE (sizetype),
8940 GEN_INT (MEMORY_USE_RO),
8941 TYPE_MODE (integer_type_node));
8942 in_check_memory_usage = 0;
8945 /* In cases where an aligned union has an unaligned object
8946 as a field, we might be extracting a BLKmode value from
8947 an integer-mode (e.g., SImode) object. Handle this case
8948 by doing the extract into an object as wide as the field
8949 (which we know to be the width of a basic mode), then
8950 storing into memory, and changing the mode to BLKmode.
8951 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8952 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8953 if (mode1 == VOIDmode
8954 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8955 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8956 && (TYPE_ALIGN (type) > alignment
8957 || bitpos % TYPE_ALIGN (type) != 0)))
8959 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8961 if (ext_mode == BLKmode)
8963 /* In this case, BITPOS must start at a byte boundary. */
8964 if (GET_CODE (op0) != MEM
8965 || bitpos % BITS_PER_UNIT != 0)
8966 abort ();
8968 op0 = change_address (op0, VOIDmode,
8969 plus_constant (XEXP (op0, 0),
8970 bitpos / BITS_PER_UNIT));
8972 else
8974 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8975 TYPE_QUAL_CONST);
8976 rtx new = assign_temp (nt, 0, 1, 1);
8978 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8979 unsignedp, NULL_RTX, ext_mode,
8980 ext_mode, alignment,
8981 int_size_in_bytes (TREE_TYPE (tem)));
8983 /* If the result is a record type and BITSIZE is narrower than
8984 the mode of OP0, an integral mode, and this is a big endian
8985 machine, we must put the field into the high-order bits. */
8986 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8987 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8988 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8989 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8990 size_int (GET_MODE_BITSIZE
8991 (GET_MODE (op0))
8992 - bitsize),
8993 op0, 1);
8995 emit_move_insn (new, op0);
8996 op0 = copy_rtx (new);
8997 PUT_MODE (op0, BLKmode);
9000 else
9001 /* Get a reference to just this component. */
9002 op0 = change_address (op0, mode1,
9003 plus_constant (XEXP (op0, 0),
9004 (bitpos / BITS_PER_UNIT)));
9006 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9008 /* Adjust the alignment in case the bit position is not
9009 a multiple of the alignment of the inner object. */
9010 while (bitpos % alignment != 0)
9011 alignment >>= 1;
9013 if (GET_CODE (XEXP (op0, 0)) == REG)
9014 mark_reg_pointer (XEXP (op0, 0), alignment);
9016 MEM_IN_STRUCT_P (op0) = 1;
9017 MEM_VOLATILE_P (op0) |= volatilep;
9019 *palign = alignment;
9020 return op0;
9023 default:
9024 break;
9028 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9031 /* Return the tree node if ARG corresponds to a string constant, or zero
9032 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9033 in bytes within the string that ARG is accessing. The type of the
9034 offset will be `sizetype'. */
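/* For example, if ARG is a PLUS_EXPR whose first operand is the ADDR_EXPR
   of the STRING_CST "hello" and whose second operand is the integer
   constant 2, the STRING_CST is returned and *PTR_OFFSET is set to 2
   (converted to sizetype) by the code below.  */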
9036 tree
9037 string_constant (arg, ptr_offset)
9038 tree arg;
9039 tree *ptr_offset;
9041 STRIP_NOPS (arg);
9043 if (TREE_CODE (arg) == ADDR_EXPR
9044 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9046 *ptr_offset = size_zero_node;
9047 return TREE_OPERAND (arg, 0);
9049 else if (TREE_CODE (arg) == PLUS_EXPR)
9051 tree arg0 = TREE_OPERAND (arg, 0);
9052 tree arg1 = TREE_OPERAND (arg, 1);
9054 STRIP_NOPS (arg0);
9055 STRIP_NOPS (arg1);
9057 if (TREE_CODE (arg0) == ADDR_EXPR
9058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9060 *ptr_offset = convert (sizetype, arg1);
9061 return TREE_OPERAND (arg0, 0);
9063 else if (TREE_CODE (arg1) == ADDR_EXPR
9064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9066 *ptr_offset = convert (sizetype, arg0);
9067 return TREE_OPERAND (arg1, 0);
9071 return 0;
9074 /* Expand code for a post- or pre- increment or decrement
9075 and return the RTX for the result.
9076 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
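/* For example, the C expression i++ reaches this function as a
   POSTINCREMENT_EXPR with POST == 1, while --i is a PREDECREMENT_EXPR with
   POST == 0; the decrement cases are handled below by switching THIS_OPTAB
   from add_optab to sub_optab.  */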
9078 static rtx
9079 expand_increment (exp, post, ignore)
9080 register tree exp;
9081 int post, ignore;
9083 register rtx op0, op1;
9084 register rtx temp, value;
9085 register tree incremented = TREE_OPERAND (exp, 0);
9086 optab this_optab = add_optab;
9087 int icode;
9088 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9089 int op0_is_copy = 0;
9090 int single_insn = 0;
9091 /* 1 means we can't store into OP0 directly,
9092 because it is a subreg narrower than a word,
9093 and we don't dare clobber the rest of the word. */
9094 int bad_subreg = 0;
9096 /* Stabilize any component ref that might need to be
9097 evaluated more than once below. */
9098 if (!post
9099 || TREE_CODE (incremented) == BIT_FIELD_REF
9100 || (TREE_CODE (incremented) == COMPONENT_REF
9101 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9102 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9103 incremented = stabilize_reference (incremented);
9104 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9105 ones into save exprs so that they don't accidentally get evaluated
9106 more than once by the code below. */
9107 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9108 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9109 incremented = save_expr (incremented);
9111 /* Compute the operands as RTX.
9112 Note whether OP0 is the actual lvalue or a copy of it:
9113 I believe it is a copy iff it is a register or subreg
9114 and insns were generated in computing it. */
9116 temp = get_last_insn ();
9117 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9119 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9120 in place but instead must do sign- or zero-extension during assignment,
9121 so we copy it into a new register and let the code below use it as
9122 a copy.
9124 Note that we can safely modify this SUBREG since it is known not to be
9125 shared (it was made by the expand_expr call above). */
9127 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9129 if (post)
9130 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9131 else
9132 bad_subreg = 1;
9134 else if (GET_CODE (op0) == SUBREG
9135 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9137 /* We cannot increment this SUBREG in place. If we are
9138 post-incrementing, get a copy of the old value. Otherwise,
9139 just mark that we cannot increment in place. */
9140 if (post)
9141 op0 = copy_to_reg (op0);
9142 else
9143 bad_subreg = 1;
9146 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9147 && temp != get_last_insn ());
9148 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9149 EXPAND_MEMORY_USE_BAD);
9151 /* Decide whether incrementing or decrementing. */
9152 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9153 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9154 this_optab = sub_optab;
9156 /* Convert decrement by a constant into a negative increment. */
9157 if (this_optab == sub_optab
9158 && GET_CODE (op1) == CONST_INT)
9160 op1 = GEN_INT (-INTVAL (op1));
9161 this_optab = add_optab;
9164 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9165 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9167 /* For a preincrement, see if we can do this with a single instruction. */
9168 if (!post)
9170 icode = (int) this_optab->handlers[(int) mode].insn_code;
9171 if (icode != (int) CODE_FOR_nothing
9172 /* Make sure that OP0 is valid for operands 0 and 1
9173 of the insn we want to queue. */
9174 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9175 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9176 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9177 single_insn = 1;
9180 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9181 then we cannot just increment OP0. We must therefore contrive to
9182 increment the original value. Then, for postincrement, we can return
9183 OP0 since it is a copy of the old value. For preincrement, expand here
9184 unless we can do it with a single insn.
9186 Likewise if storing directly into OP0 would clobber high bits
9187 we need to preserve (bad_subreg). */
9188 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9190 /* This is the easiest way to increment the value wherever it is.
9191 Problems with multiple evaluation of INCREMENTED are prevented
9192 because either (1) it is a component_ref or preincrement,
9193 in which case it was stabilized above, or (2) it is an array_ref
9194 with constant index in an array in a register, which is
9195 safe to reevaluate. */
9196 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9197 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9198 ? MINUS_EXPR : PLUS_EXPR),
9199 TREE_TYPE (exp),
9200 incremented,
9201 TREE_OPERAND (exp, 1));
9203 while (TREE_CODE (incremented) == NOP_EXPR
9204 || TREE_CODE (incremented) == CONVERT_EXPR)
9206 newexp = convert (TREE_TYPE (incremented), newexp);
9207 incremented = TREE_OPERAND (incremented, 0);
9210 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9211 return post ? op0 : temp;
9214 if (post)
9216 /* We have a true reference to the value in OP0.
9217 If there is an insn to add or subtract in this mode, queue it.
9218 Queueing the increment insn avoids the register shuffling
9219 that often results if we must increment now and first save
9220 the old value for subsequent use. */
9222 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9223 op0 = stabilize (op0);
9224 #endif
9226 icode = (int) this_optab->handlers[(int) mode].insn_code;
9227 if (icode != (int) CODE_FOR_nothing
9228 /* Make sure that OP0 is valid for operands 0 and 1
9229 of the insn we want to queue. */
9230 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9231 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9233 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9234 op1 = force_reg (mode, op1);
9236 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9238 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9240 rtx addr = (general_operand (XEXP (op0, 0), mode)
9241 ? force_reg (Pmode, XEXP (op0, 0))
9242 : copy_to_reg (XEXP (op0, 0)));
9243 rtx temp, result;
9245 op0 = change_address (op0, VOIDmode, addr);
9246 temp = force_reg (GET_MODE (op0), op0);
9247 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9248 op1 = force_reg (mode, op1);
9250 /* The increment queue is LIFO, thus we have to `queue'
9251 the instructions in reverse order. */
9252 enqueue_insn (op0, gen_move_insn (op0, temp));
9253 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9254 return result;
9258 /* Preincrement, or we can't increment with one simple insn. */
9259 if (post)
9260 /* Save a copy of the value before inc or dec, to return it later. */
9261 temp = value = copy_to_reg (op0);
9262 else
9263 /* Arrange to return the incremented value. */
9264 /* Copy the rtx because expand_binop will protect from the queue,
9265 and the results of that would be invalid for us to return
9266 if our caller does emit_queue before using our result. */
9267 temp = copy_rtx (value = op0);
9269 /* Increment however we can. */
9270 op1 = expand_binop (mode, this_optab, value, op1,
9271 current_function_check_memory_usage ? NULL_RTX : op0,
9272 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9273 /* Make sure the value is stored into OP0. */
9274 if (op1 != op0)
9275 emit_move_insn (op0, op1);
9277 return temp;
9280 /* At the start of a function, record that we have no previously-pushed
9281 arguments waiting to be popped. */
9283 void
9284 init_pending_stack_adjust ()
9286 pending_stack_adjust = 0;
9289 /* When exiting from function, if safe, clear out any pending stack adjust
9290 so the adjustment won't get done.
9292 Note, if the current function calls alloca, then it must have a
9293 frame pointer regardless of the value of flag_omit_frame_pointer. */
9295 void
9296 clear_pending_stack_adjust ()
9298 #ifdef EXIT_IGNORE_STACK
9299 if (optimize > 0
9300 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9301 && EXIT_IGNORE_STACK
9302 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9303 && ! flag_inline_functions)
9305 stack_pointer_delta -= pending_stack_adjust,
9306 pending_stack_adjust = 0;
9308 #endif
9311 /* Pop any previously-pushed arguments that have not been popped yet. */
9313 void
9314 do_pending_stack_adjust ()
9316 if (inhibit_defer_pop == 0)
9318 if (pending_stack_adjust != 0)
9319 adjust_stack (GEN_INT (pending_stack_adjust));
9320 pending_stack_adjust = 0;
9324 /* Expand conditional expressions. */
9326 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9327 LABEL is an rtx of code CODE_LABEL, in this function and all the
9328 functions here. */
9330 void
9331 jumpifnot (exp, label)
9332 tree exp;
9333 rtx label;
9335 do_jump (exp, label, NULL_RTX);
9338 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9340 void
9341 jumpif (exp, label)
9342 tree exp;
9343 rtx label;
9345 do_jump (exp, NULL_RTX, label);
9348 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9349 the result is zero, or IF_TRUE_LABEL if the result is one.
9350 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9351 meaning fall through in that case.
9353 do_jump always does any pending stack adjust except when it does not
9354 actually perform a jump. An example where there is no jump
9355 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9357 This function is responsible for optimizing cases such as
9358 &&, || and comparison operators in EXP. */
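/* For example, for a C condition such as (a && b), the TRUTH_ANDIF_EXPR
   case below jumps to IF_FALSE_LABEL as soon as the first operand is known
   to be zero, so the second operand is only evaluated when the first one
   is nonzero.  */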
9360 void
9361 do_jump (exp, if_false_label, if_true_label)
9362 tree exp;
9363 rtx if_false_label, if_true_label;
9365 register enum tree_code code = TREE_CODE (exp);
9366 /* Some cases need to create a label to jump to
9367 in order to properly fall through.
9368 These cases set DROP_THROUGH_LABEL nonzero. */
9369 rtx drop_through_label = 0;
9370 rtx temp;
9371 int i;
9372 tree type;
9373 enum machine_mode mode;
9375 #ifdef MAX_INTEGER_COMPUTATION_MODE
9376 check_max_integer_computation_mode (exp);
9377 #endif
9379 emit_queue ();
9381 switch (code)
9383 case ERROR_MARK:
9384 break;
9386 case INTEGER_CST:
9387 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9388 if (temp)
9389 emit_jump (temp);
9390 break;
9392 #if 0
9393 /* This is not true with #pragma weak */
9394 case ADDR_EXPR:
9395 /* The address of something can never be zero. */
9396 if (if_true_label)
9397 emit_jump (if_true_label);
9398 break;
9399 #endif
9401 case NOP_EXPR:
9402 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9403 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9404 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9405 goto normal;
9406 case CONVERT_EXPR:
9407 /* If we are narrowing the operand, we have to do the compare in the
9408 narrower mode. */
9409 if ((TYPE_PRECISION (TREE_TYPE (exp))
9410 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9411 goto normal;
9412 case NON_LVALUE_EXPR:
9413 case REFERENCE_EXPR:
9414 case ABS_EXPR:
9415 case NEGATE_EXPR:
9416 case LROTATE_EXPR:
9417 case RROTATE_EXPR:
9418 /* These cannot change zero->non-zero or vice versa. */
9419 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9420 break;
9422 case WITH_RECORD_EXPR:
9423 /* Put the object on the placeholder list, recurse through our first
9424 operand, and pop the list. */
9425 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9426 placeholder_list);
9427 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9428 placeholder_list = TREE_CHAIN (placeholder_list);
9429 break;
9431 #if 0
9432 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9433 a test, and can take more if the test is eliminated. */
9434 case PLUS_EXPR:
9435 /* Reduce to minus. */
9436 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9437 TREE_OPERAND (exp, 0),
9438 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9439 TREE_OPERAND (exp, 1))));
9440 /* Process as MINUS. */
9441 #endif
9443 case MINUS_EXPR:
9444 /* Non-zero iff operands of minus differ. */
9445 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9446 TREE_OPERAND (exp, 0),
9447 TREE_OPERAND (exp, 1)),
9448 NE, NE, if_false_label, if_true_label);
9449 break;
9451 case BIT_AND_EXPR:
9452 /* If we are AND'ing with a small constant, do this comparison in the
9453 smallest type that fits. If the machine doesn't have comparisons
9454 that small, it will be converted back to the wider comparison.
9455 This helps if we are testing the sign bit of a narrower object.
9456 combine can't do this for us because it can't know whether a
9457 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
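/* E.g., a test of (x & 0x80) can be done as a comparison in the 8-bit
   integer mode (typically QImode), which is what the mode_for_size and
   type_for_mode calls below arrange when the target has a compare insn
   for that narrow mode.  */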
9459 if (! SLOW_BYTE_ACCESS
9460 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9461 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9462 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9463 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9464 && (type = type_for_mode (mode, 1)) != 0
9465 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9466 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9467 != CODE_FOR_nothing))
9469 do_jump (convert (type, exp), if_false_label, if_true_label);
9470 break;
9472 goto normal;
9474 case TRUTH_NOT_EXPR:
9475 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9476 break;
9478 case TRUTH_ANDIF_EXPR:
9479 if (if_false_label == 0)
9480 if_false_label = drop_through_label = gen_label_rtx ();
9481 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9482 start_cleanup_deferral ();
9483 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9484 end_cleanup_deferral ();
9485 break;
9487 case TRUTH_ORIF_EXPR:
9488 if (if_true_label == 0)
9489 if_true_label = drop_through_label = gen_label_rtx ();
9490 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9491 start_cleanup_deferral ();
9492 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9493 end_cleanup_deferral ();
9494 break;
9496 case COMPOUND_EXPR:
9497 push_temp_slots ();
9498 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9499 preserve_temp_slots (NULL_RTX);
9500 free_temp_slots ();
9501 pop_temp_slots ();
9502 emit_queue ();
9503 do_pending_stack_adjust ();
9504 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9505 break;
9507 case COMPONENT_REF:
9508 case BIT_FIELD_REF:
9509 case ARRAY_REF:
9511 HOST_WIDE_INT bitsize, bitpos;
9512 int unsignedp;
9513 enum machine_mode mode;
9514 tree type;
9515 tree offset;
9516 int volatilep = 0;
9517 unsigned int alignment;
9519 /* Get description of this reference. We don't actually care
9520 about the underlying object here. */
9521 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9522 &unsignedp, &volatilep, &alignment);
9524 type = type_for_size (bitsize, unsignedp);
9525 if (! SLOW_BYTE_ACCESS
9526 && type != 0 && bitsize >= 0
9527 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9528 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9529 != CODE_FOR_nothing))
9531 do_jump (convert (type, exp), if_false_label, if_true_label);
9532 break;
9534 goto normal;
9537 case COND_EXPR:
9538 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9539 if (integer_onep (TREE_OPERAND (exp, 1))
9540 && integer_zerop (TREE_OPERAND (exp, 2)))
9541 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9543 else if (integer_zerop (TREE_OPERAND (exp, 1))
9544 && integer_onep (TREE_OPERAND (exp, 2)))
9545 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9547 else
9549 register rtx label1 = gen_label_rtx ();
9550 drop_through_label = gen_label_rtx ();
9552 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9554 start_cleanup_deferral ();
9555 /* Now the THEN-expression. */
9556 do_jump (TREE_OPERAND (exp, 1),
9557 if_false_label ? if_false_label : drop_through_label,
9558 if_true_label ? if_true_label : drop_through_label);
9559 /* In case the do_jump just above never jumps. */
9560 do_pending_stack_adjust ();
9561 emit_label (label1);
9563 /* Now the ELSE-expression. */
9564 do_jump (TREE_OPERAND (exp, 2),
9565 if_false_label ? if_false_label : drop_through_label,
9566 if_true_label ? if_true_label : drop_through_label);
9567 end_cleanup_deferral ();
9569 break;
9571 case EQ_EXPR:
9573 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9575 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9576 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9578 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9579 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9580 do_jump
9581 (fold
9582 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9583 fold (build (EQ_EXPR, TREE_TYPE (exp),
9584 fold (build1 (REALPART_EXPR,
9585 TREE_TYPE (inner_type),
9586 exp0)),
9587 fold (build1 (REALPART_EXPR,
9588 TREE_TYPE (inner_type),
9589 exp1)))),
9590 fold (build (EQ_EXPR, TREE_TYPE (exp),
9591 fold (build1 (IMAGPART_EXPR,
9592 TREE_TYPE (inner_type),
9593 exp0)),
9594 fold (build1 (IMAGPART_EXPR,
9595 TREE_TYPE (inner_type),
9596 exp1)))))),
9597 if_false_label, if_true_label);
9600 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9601 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9603 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9604 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9605 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9606 else
9607 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9608 break;
9611 case NE_EXPR:
9613 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9615 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9616 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9618 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9619 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9620 do_jump
9621 (fold
9622 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9623 fold (build (NE_EXPR, TREE_TYPE (exp),
9624 fold (build1 (REALPART_EXPR,
9625 TREE_TYPE (inner_type),
9626 exp0)),
9627 fold (build1 (REALPART_EXPR,
9628 TREE_TYPE (inner_type),
9629 exp1)))),
9630 fold (build (NE_EXPR, TREE_TYPE (exp),
9631 fold (build1 (IMAGPART_EXPR,
9632 TREE_TYPE (inner_type),
9633 exp0)),
9634 fold (build1 (IMAGPART_EXPR,
9635 TREE_TYPE (inner_type),
9636 exp1)))))),
9637 if_false_label, if_true_label);
9640 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9641 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9643 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9644 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9645 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9646 else
9647 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9648 break;
9651 case LT_EXPR:
9652 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9653 if (GET_MODE_CLASS (mode) == MODE_INT
9654 && ! can_compare_p (LT, mode, ccp_jump))
9655 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9656 else
9657 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9658 break;
9660 case LE_EXPR:
9661 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9662 if (GET_MODE_CLASS (mode) == MODE_INT
9663 && ! can_compare_p (LE, mode, ccp_jump))
9664 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9665 else
9666 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9667 break;
9669 case GT_EXPR:
9670 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9671 if (GET_MODE_CLASS (mode) == MODE_INT
9672 && ! can_compare_p (GT, mode, ccp_jump))
9673 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9674 else
9675 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9676 break;
9678 case GE_EXPR:
9679 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9680 if (GET_MODE_CLASS (mode) == MODE_INT
9681 && ! can_compare_p (GE, mode, ccp_jump))
9682 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9683 else
9684 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9685 break;
9687 case UNORDERED_EXPR:
9688 case ORDERED_EXPR:
9690 enum rtx_code cmp, rcmp;
9691 int do_rev;
9693 if (code == UNORDERED_EXPR)
9694 cmp = UNORDERED, rcmp = ORDERED;
9695 else
9696 cmp = ORDERED, rcmp = UNORDERED;
9697 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9699 do_rev = 0;
9700 if (! can_compare_p (cmp, mode, ccp_jump)
9701 && (can_compare_p (rcmp, mode, ccp_jump)
9702 /* If the target doesn't provide either UNORDERED or ORDERED
9703 comparisons, canonicalize on UNORDERED for the library. */
9704 || rcmp == UNORDERED))
9705 do_rev = 1;
9707 if (! do_rev)
9708 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9709 else
9710 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9712 break;
9715 enum rtx_code rcode1;
9716 enum tree_code tcode2;
9718 case UNLT_EXPR:
9719 rcode1 = UNLT;
9720 tcode2 = LT_EXPR;
9721 goto unordered_bcc;
9722 case UNLE_EXPR:
9723 rcode1 = UNLE;
9724 tcode2 = LE_EXPR;
9725 goto unordered_bcc;
9726 case UNGT_EXPR:
9727 rcode1 = UNGT;
9728 tcode2 = GT_EXPR;
9729 goto unordered_bcc;
9730 case UNGE_EXPR:
9731 rcode1 = UNGE;
9732 tcode2 = GE_EXPR;
9733 goto unordered_bcc;
9734 case UNEQ_EXPR:
9735 rcode1 = UNEQ;
9736 tcode2 = EQ_EXPR;
9737 goto unordered_bcc;
9739 unordered_bcc:
9740 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9741 if (can_compare_p (rcode1, mode, ccp_jump))
9742 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9743 if_true_label);
9744 else
9746 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9747 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9748 tree cmp0, cmp1;
9750 /* If the target doesn't support combined unordered
9751 compares, decompose into UNORDERED + comparison. */
9752 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9753 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9754 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9755 do_jump (exp, if_false_label, if_true_label);
9758 break;
9760 default:
9761 normal:
9762 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9763 #if 0
9764 /* This is not needed any more and causes poor code since it causes
9765 comparisons and tests from non-SI objects to have different code
9766 sequences. */
9767 /* Copy to register to avoid generating bad insns by cse
9768 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9769 if (!cse_not_expected && GET_CODE (temp) == MEM)
9770 temp = copy_to_reg (temp);
9771 #endif
9772 do_pending_stack_adjust ();
9773 /* Do any postincrements in the expression that was tested. */
9774 emit_queue ();
9776 if (GET_CODE (temp) == CONST_INT
9777 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9778 || GET_CODE (temp) == LABEL_REF)
9780 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9781 if (target)
9782 emit_jump (target);
9784 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9785 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9786 /* Note swapping the labels gives us not-equal. */
9787 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9788 else if (GET_MODE (temp) != VOIDmode)
9789 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9790 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9791 GET_MODE (temp), NULL_RTX, 0,
9792 if_false_label, if_true_label);
9793 else
9794 abort ();
9797 if (drop_through_label)
9799 /* If do_jump produces code that might be jumped around,
9800 do any stack adjusts from that code, before the place
9801 where control merges in. */
9802 do_pending_stack_adjust ();
9803 emit_label (drop_through_label);
9807 /* Given a comparison expression EXP for values too wide to be compared
9808 with one insn, test the comparison and jump to the appropriate label.
9809 The code of EXP is ignored; we always test GT if SWAP is 0,
9810 and LT if SWAP is 1. */
9812 static void
9813 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9814 tree exp;
9815 int swap;
9816 rtx if_false_label, if_true_label;
9818 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9819 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9820 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9821 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9823 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9826 /* Compare OP0 with OP1, word at a time, in mode MODE.
9827 UNSIGNEDP says to do unsigned comparison.
9828 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9830 void
9831 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9832 enum machine_mode mode;
9833 int unsignedp;
9834 rtx op0, op1;
9835 rtx if_false_label, if_true_label;
9837 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9838 rtx drop_through_label = 0;
9839 int i;
9841 if (! if_true_label || ! if_false_label)
9842 drop_through_label = gen_label_rtx ();
9843 if (! if_true_label)
9844 if_true_label = drop_through_label;
9845 if (! if_false_label)
9846 if_false_label = drop_through_label;
9848 /* Compare a word at a time, high order first. */
9849 for (i = 0; i < nwords; i++)
9851 rtx op0_word, op1_word;
9853 if (WORDS_BIG_ENDIAN)
9855 op0_word = operand_subword_force (op0, i, mode);
9856 op1_word = operand_subword_force (op1, i, mode);
9858 else
9860 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9861 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9864 /* All but high-order word must be compared as unsigned. */
9865 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9866 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9867 NULL_RTX, if_true_label);
9869 /* Consider lower words only if these are equal. */
9870 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9871 NULL_RTX, 0, NULL_RTX, if_false_label);
9874 if (if_false_label)
9875 emit_jump (if_false_label);
9876 if (drop_through_label)
9877 emit_label (drop_through_label);
9880 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9881 with one insn, test the comparison and jump to the appropriate label. */
9883 static void
9884 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9885 tree exp;
9886 rtx if_false_label, if_true_label;
9888 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9889 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9890 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9891 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9892 int i;
9893 rtx drop_through_label = 0;
9895 if (! if_false_label)
9896 drop_through_label = if_false_label = gen_label_rtx ();
9898 for (i = 0; i < nwords; i++)
9899 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9900 operand_subword_force (op1, i, mode),
9901 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9902 word_mode, NULL_RTX, 0, if_false_label,
9903 NULL_RTX);
9905 if (if_true_label)
9906 emit_jump (if_true_label);
9907 if (drop_through_label)
9908 emit_label (drop_through_label);
9911 /* Jump according to whether OP0 is 0.
9912 We assume that OP0 has an integer mode that is too wide
9913 for the available compare insns. */
9915 void
9916 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9917 rtx op0;
9918 rtx if_false_label, if_true_label;
9920 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9921 rtx part;
9922 int i;
9923 rtx drop_through_label = 0;
9925 /* The fastest way of doing this comparison on almost any machine is to
9926 "or" all the words and compare the result. If all have to be loaded
9927 from memory and this is a very wide item, it's possible this may
9928 be slower, but that's highly unlikely. */
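/* E.g., testing a two-word value (DImode on a 32-bit target) ORs the two
   word_mode subwords into one register and compares that single result
   against const0_rtx.  */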
9930 part = gen_reg_rtx (word_mode);
9931 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9932 for (i = 1; i < nwords && part != 0; i++)
9933 part = expand_binop (word_mode, ior_optab, part,
9934 operand_subword_force (op0, i, GET_MODE (op0)),
9935 part, 1, OPTAB_WIDEN);
9937 if (part != 0)
9939 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9940 NULL_RTX, 0, if_false_label, if_true_label);
9942 return;
9945 /* If we couldn't do the "or" simply, do this with a series of compares. */
9946 if (! if_false_label)
9947 drop_through_label = if_false_label = gen_label_rtx ();
9949 for (i = 0; i < nwords; i++)
9950 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9951 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9952 if_false_label, NULL_RTX);
9954 if (if_true_label)
9955 emit_jump (if_true_label);
9957 if (drop_through_label)
9958 emit_label (drop_through_label);
9961 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9962 (including code to compute the values to be compared)
9963 and set (CC0) according to the result.
9964 The decision as to signed or unsigned comparison must be made by the caller.
9966 We force a stack adjustment unless there are currently
9967 things pushed on the stack that aren't yet used.
9969 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9970 compared.
9972 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9973 size of MODE should be used. */
9975 rtx
9976 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9977 register rtx op0, op1;
9978 enum rtx_code code;
9979 int unsignedp;
9980 enum machine_mode mode;
9981 rtx size;
9982 unsigned int align;
9984 rtx tem;
9986 /* If one operand is constant, make it the second one. Only do this
9987 if the other operand is not constant as well. */
9989 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9990 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9992 tem = op0;
9993 op0 = op1;
9994 op1 = tem;
9995 code = swap_condition (code);
9998 if (flag_force_mem)
10000 op0 = force_not_mem (op0);
10001 op1 = force_not_mem (op1);
10004 do_pending_stack_adjust ();
10006 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10007 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10008 return tem;
10010 #if 0
10011 /* There's no need to do this now that combine.c can eliminate lots of
10012 sign extensions. This can be less efficient in certain cases on other
10013 machines. */
10015 /* If this is a signed equality comparison, we can do it as an
10016 unsigned comparison since zero-extension is cheaper than sign
10017 extension and comparisons with zero are done as unsigned. This is
10018 the case even on machines that can do fast sign extension, since
10019 zero-extension is easier to combine with other operations than
10020 sign-extension is. If we are comparing against a constant, we must
10021 convert it to what it would look like unsigned. */
10022 if ((code == EQ || code == NE) && ! unsignedp
10023 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10025 if (GET_CODE (op1) == CONST_INT
10026 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10027 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10028 unsignedp = 1;
10030 #endif
10032 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10034 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10037 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10038 The decision as to signed or unsigned comparison must be made by the caller.
10040 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10041 compared.
10043 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10044 size of MODE should be used. */
10046 void
10047 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10048 if_false_label, if_true_label)
10049 register rtx op0, op1;
10050 enum rtx_code code;
10051 int unsignedp;
10052 enum machine_mode mode;
10053 rtx size;
10054 unsigned int align;
10055 rtx if_false_label, if_true_label;
10057 rtx tem;
10058 int dummy_true_label = 0;
10060 /* Reverse the comparison if that is safe and we want to jump if it is
10061 false. */
10062 if (! if_true_label && ! FLOAT_MODE_P (mode))
10064 if_true_label = if_false_label;
10065 if_false_label = 0;
10066 code = reverse_condition (code);
10069 /* If one operand is constant, make it the second one. Only do this
10070 if the other operand is not constant as well. */
10072 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10073 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10075 tem = op0;
10076 op0 = op1;
10077 op1 = tem;
10078 code = swap_condition (code);
10081 if (flag_force_mem)
10083 op0 = force_not_mem (op0);
10084 op1 = force_not_mem (op1);
10087 do_pending_stack_adjust ();
10089 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10090 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10092 if (tem == const_true_rtx)
10094 if (if_true_label)
10095 emit_jump (if_true_label);
10097 else
10099 if (if_false_label)
10100 emit_jump (if_false_label);
10102 return;
10105 #if 0
10106 /* There's no need to do this now that combine.c can eliminate lots of
10107 sign extensions. This can be less efficient in certain cases on other
10108 machines. */
10110 /* If this is a signed equality comparison, we can do it as an
10111 unsigned comparison since zero-extension is cheaper than sign
10112 extension and comparisons with zero are done as unsigned. This is
10113 the case even on machines that can do fast sign extension, since
10114 zero-extension is easier to combine with other operations than
10115 sign-extension is. If we are comparing against a constant, we must
10116 convert it to what it would look like unsigned. */
10117 if ((code == EQ || code == NE) && ! unsignedp
10118 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10120 if (GET_CODE (op1) == CONST_INT
10121 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10122 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10123 unsignedp = 1;
10125 #endif
10127 if (! if_true_label)
10129 dummy_true_label = 1;
10130 if_true_label = gen_label_rtx ();
10133 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10134 if_true_label);
10136 if (if_false_label)
10137 emit_jump (if_false_label);
10138 if (dummy_true_label)
10139 emit_label (if_true_label);
10142 /* Generate code for a comparison expression EXP (including code to compute
10143 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10144 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10145 generated code will drop through.
10146 SIGNED_CODE should be the rtx operation for this comparison for
10147 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10149 We force a stack adjustment unless there are currently
10150 things pushed on the stack that aren't yet used. */
10152 static void
10153 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10154 if_true_label)
10155 register tree exp;
10156 enum rtx_code signed_code, unsigned_code;
10157 rtx if_false_label, if_true_label;
10159 unsigned int align0, align1;
10160 register rtx op0, op1;
10161 register tree type;
10162 register enum machine_mode mode;
10163 int unsignedp;
10164 enum rtx_code code;
10166 /* Don't crash if the comparison was erroneous. */
10167 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10168 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10169 return;
10171 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10172 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10173 return;
10175 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10176 mode = TYPE_MODE (type);
10177 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10178 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10179 || (GET_MODE_BITSIZE (mode)
10180 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10181 1)))))))
10183 /* op0 might have been replaced by a promoted constant, in which
10184 case the type of the second argument should be used.
10185 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10186 mode = TYPE_MODE (type);
10188 unsignedp = TREE_UNSIGNED (type);
10189 code = unsignedp ? unsigned_code : signed_code;
10191 #ifdef HAVE_canonicalize_funcptr_for_compare
10192 /* If function pointers need to be "canonicalized" before they can
10193 be reliably compared, then canonicalize them. */
10194 if (HAVE_canonicalize_funcptr_for_compare
10195 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10196 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10197 == FUNCTION_TYPE))
10199 rtx new_op0 = gen_reg_rtx (mode);
10201 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10202 op0 = new_op0;
10205 if (HAVE_canonicalize_funcptr_for_compare
10206 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10207 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10208 == FUNCTION_TYPE))
10210 rtx new_op1 = gen_reg_rtx (mode);
10212 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10213 op1 = new_op1;
10215 #endif
10217 /* Do any postincrements in the expression that was tested. */
10218 emit_queue ();
10220 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10221 ((mode == BLKmode)
10222 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10223 MIN (align0, align1),
10224 if_false_label, if_true_label);
10227 /* Generate code to calculate EXP using a store-flag instruction
10228 and return an rtx for the result. EXP is either a comparison
10229 or a TRUTH_NOT_EXPR whose operand is a comparison.
10231 If TARGET is nonzero, store the result there if convenient.
10233 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10234 cheap.
10236 Return zero if there is no suitable set-flag instruction
10237 available on this machine.
10239 Once expand_expr has been called on the arguments of the comparison,
10240 we are committed to doing the store flag, since it is not safe to
10241 re-evaluate the expression. We emit the store-flag insn by calling
10242 emit_store_flag, but only expand the arguments if we have a reason
10243 to believe that emit_store_flag will be successful. If we think that
10244 it will, but it isn't, we have to simulate the store-flag with a
10245 set/jump/set sequence. */
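/* E.g., for "r = (a < b);" on a target with a usable scc pattern,
   emit_store_flag below produces a single store-flag insn; otherwise the
   fallback at the end loads 1 into TARGET, emits the comparison, branches
   over a store of 0 when the condition holds, and falls through
   (ignoring the TRUTH_NOT_EXPR inversion).  */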
10247 static rtx
10248 do_store_flag (exp, target, mode, only_cheap)
10249 tree exp;
10250 rtx target;
10251 enum machine_mode mode;
10252 int only_cheap;
10254 enum rtx_code code;
10255 tree arg0, arg1, type;
10256 tree tem;
10257 enum machine_mode operand_mode;
10258 int invert = 0;
10259 int unsignedp;
10260 rtx op0, op1;
10261 enum insn_code icode;
10262 rtx subtarget = target;
10263 rtx result, label;
10265 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10266 result at the end. We can't simply invert the test since it would
10267 have already been inverted if it were valid. This case occurs for
10268 some floating-point comparisons. */
10270 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10271 invert = 1, exp = TREE_OPERAND (exp, 0);
10273 arg0 = TREE_OPERAND (exp, 0);
10274 arg1 = TREE_OPERAND (exp, 1);
10276 /* Don't crash if the comparison was erroneous. */
10277 if (arg0 == error_mark_node || arg1 == error_mark_node)
10278 return const0_rtx;
10280 type = TREE_TYPE (arg0);
10281 operand_mode = TYPE_MODE (type);
10282 unsignedp = TREE_UNSIGNED (type);
10284 /* We won't bother with BLKmode store-flag operations because it would mean
10285 passing a lot of information to emit_store_flag. */
10286 if (operand_mode == BLKmode)
10287 return 0;
10289 /* We won't bother with store-flag operations involving function pointers
10290 when function pointers must be canonicalized before comparisons. */
10291 #ifdef HAVE_canonicalize_funcptr_for_compare
10292 if (HAVE_canonicalize_funcptr_for_compare
10293 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10294 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10295 == FUNCTION_TYPE))
10296 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10297 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10298 == FUNCTION_TYPE))))
10299 return 0;
10300 #endif
10302 STRIP_NOPS (arg0);
10303 STRIP_NOPS (arg1);
10305 /* Get the rtx comparison code to use. We know that EXP is a comparison
10306 operation of some type. Some comparisons against 1 and -1 can be
10307 converted to comparisons with zero. Do so here so that the tests
10308 below will be aware that we have a comparison with zero. These
10309 tests will not catch constants in the first operand, but constants
10310 are rarely passed as the first operand. */
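/* For instance, an unsigned x < 1 is rewritten below as x <= 0 (LEU
   against integer_zero_node), and a signed x > -1 as x >= 0 (GE), so the
   comparisons-with-zero special cases later in this function apply.  */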
10312 switch (TREE_CODE (exp))
10314 case EQ_EXPR:
10315 code = EQ;
10316 break;
10317 case NE_EXPR:
10318 code = NE;
10319 break;
10320 case LT_EXPR:
10321 if (integer_onep (arg1))
10322 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10323 else
10324 code = unsignedp ? LTU : LT;
10325 break;
10326 case LE_EXPR:
10327 if (! unsignedp && integer_all_onesp (arg1))
10328 arg1 = integer_zero_node, code = LT;
10329 else
10330 code = unsignedp ? LEU : LE;
10331 break;
10332 case GT_EXPR:
10333 if (! unsignedp && integer_all_onesp (arg1))
10334 arg1 = integer_zero_node, code = GE;
10335 else
10336 code = unsignedp ? GTU : GT;
10337 break;
10338 case GE_EXPR:
10339 if (integer_onep (arg1))
10340 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10341 else
10342 code = unsignedp ? GEU : GE;
10343 break;
10345 case UNORDERED_EXPR:
10346 code = UNORDERED;
10347 break;
10348 case ORDERED_EXPR:
10349 code = ORDERED;
10350 break;
10351 case UNLT_EXPR:
10352 code = UNLT;
10353 break;
10354 case UNLE_EXPR:
10355 code = UNLE;
10356 break;
10357 case UNGT_EXPR:
10358 code = UNGT;
10359 break;
10360 case UNGE_EXPR:
10361 code = UNGE;
10362 break;
10363 case UNEQ_EXPR:
10364 code = UNEQ;
10365 break;
10367 default:
10368 abort ();
10371 /* Put a constant second. */
10372 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10374 tem = arg0; arg0 = arg1; arg1 = tem;
10375 code = swap_condition (code);
10378 /* If this is an equality or inequality test of a single bit, we can
10379 do this by shifting the bit being tested to the low-order bit and
10380 masking the result with the constant 1. If the condition was EQ,
10381 we xor it with 1. This does not require an scc insn and is faster
10382 than an scc insn even if we have it. */
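/* E.g., (x & 8) != 0 is computed as (x >> 3) & 1, and (x & 8) == 0 as
   ((x >> 3) & 1) ^ 1, using only the shift, AND and XOR emitted below.  */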
10384 if ((code == NE || code == EQ)
10385 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10386 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10388 tree inner = TREE_OPERAND (arg0, 0);
10389 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10390 int ops_unsignedp;
10392 /* If INNER is a right shift of a constant and it plus BITNUM does
10393 not overflow, adjust BITNUM and INNER. */
10395 if (TREE_CODE (inner) == RSHIFT_EXPR
10396 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10397 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10398 && bitnum < TYPE_PRECISION (type)
10399 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10400 bitnum - TYPE_PRECISION (type)))
10402 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10403 inner = TREE_OPERAND (inner, 0);
10406 /* If we are going to be able to omit the AND below, we must do our
10407 operations as unsigned. If we must use the AND, we have a choice.
10408 Normally unsigned is faster, but for some machines signed is. */
10409 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10410 #ifdef LOAD_EXTEND_OP
10411 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10412 #else
10413 : 1
10414 #endif
10415 );
10417 if (! get_subtarget (subtarget)
10418 || GET_MODE (subtarget) != operand_mode
10419 || ! safe_from_p (subtarget, inner, 1))
10420 subtarget = 0;
10422 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10424 if (bitnum != 0)
10425 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10426 size_int (bitnum), subtarget, ops_unsignedp);
10428 if (GET_MODE (op0) != mode)
10429 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10431 if ((code == EQ && ! invert) || (code == NE && invert))
10432 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10433 ops_unsignedp, OPTAB_LIB_WIDEN);
10435 /* Put the AND last so it can combine with more things. */
10436 if (bitnum != TYPE_PRECISION (type) - 1)
10437 op0 = expand_and (op0, const1_rtx, subtarget);
10439 return op0;
10442 /* Now see if we are likely to be able to do this. Return if not. */
10443 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10444 return 0;
10446 icode = setcc_gen_code[(int) code];
10447 if (icode == CODE_FOR_nothing
10448 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10450 /* We can only do this if it is one of the special cases that
10451 can be handled without an scc insn. */
10452 if ((code == LT && integer_zerop (arg1))
10453 || (! only_cheap && code == GE && integer_zerop (arg1)))
10455 else if (BRANCH_COST >= 0
10456 && ! only_cheap && (code == NE || code == EQ)
10457 && TREE_CODE (type) != REAL_TYPE
10458 && ((abs_optab->handlers[(int) operand_mode].insn_code
10459 != CODE_FOR_nothing)
10460 || (ffs_optab->handlers[(int) operand_mode].insn_code
10461 != CODE_FOR_nothing)))
10463 else
10464 return 0;
10467 if (! get_subtarget (target)
10468 || GET_MODE (subtarget) != operand_mode
10469 || ! safe_from_p (subtarget, arg1, 1))
10470 subtarget = 0;
10472 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10473 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10475 if (target == 0)
10476 target = gen_reg_rtx (mode);
10478 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10479 because, if the emit_store_flag does anything it will succeed and
10480 OP0 and OP1 will not be used subsequently. */
10482 result = emit_store_flag (target, code,
10483 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10484 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10485 operand_mode, unsignedp, 1);
10487 if (result)
10489 if (invert)
10490 result = expand_binop (mode, xor_optab, result, const1_rtx,
10491 result, 0, OPTAB_LIB_WIDEN);
10492 return result;
10495 /* If this failed, we have to do this with set/compare/jump/set code. */
10496 if (GET_CODE (target) != REG
10497 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10498 target = gen_reg_rtx (GET_MODE (target));
10500 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10501 result = compare_from_rtx (op0, op1, code, unsignedp,
10502 operand_mode, NULL_RTX, 0);
10503 if (GET_CODE (result) == CONST_INT)
10504 return (((result == const0_rtx && ! invert)
10505 || (result != const0_rtx && invert))
10506 ? const0_rtx : const1_rtx);
10508 label = gen_label_rtx ();
10509 if (bcc_gen_fctn[(int) code] == 0)
10510 abort ();
10512 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10513 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10514 emit_label (label);
10516 return target;
10519 /* Generate a tablejump instruction (used for switch statements). */
10521 #ifdef HAVE_tablejump
10523 /* INDEX is the value being switched on, with the lowest value
10524 in the table already subtracted.
10525 MODE is its expected mode (needed if INDEX is constant).
10526 RANGE is the length of the jump table.
10527 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10529 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10530 index value is out of range. */
10532 void
10533 do_tablejump (index, mode, range, table_label, default_label)
10534 rtx index, range, table_label, default_label;
10535 enum machine_mode mode;
10537 register rtx temp, vector;
10539 /* Do an unsigned comparison (in the proper mode) between the index
10540 expression and the value which represents the length of the range.
10541 Since we just finished subtracting the lower bound of the range
10542 from the index expression, this comparison allows us to simultaneously
10543 check that the original index expression value is both greater than
10544 or equal to the minimum value of the range and less than or equal to
10545 the maximum value of the range. */
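/* E.g., if the original case values ran from 5 to 12, the caller has
   already subtracted 5 from INDEX; an original value of 3 then wraps
   around to a huge unsigned number, and an original 20 simply exceeds
   RANGE, so the single GTU test below sends both to DEFAULT_LABEL.  */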
10547 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10548 0, default_label);
10550 /* If index is in range, it must fit in Pmode.
10551 Convert to Pmode so we can index with it. */
10552 if (mode != Pmode)
10553 index = convert_to_mode (Pmode, index, 1);
10555 /* Don't let a MEM slip through, because then the INDEX that comes
10556 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10557 and break_out_memory_refs will go to work on it and mess it up. */
10558 #ifdef PIC_CASE_VECTOR_ADDRESS
10559 if (flag_pic && GET_CODE (index) != REG)
10560 index = copy_to_mode_reg (Pmode, index);
10561 #endif
10563 /* If flag_force_addr were to affect this address
10564 it could interfere with the tricky assumptions made
10565 about addresses that contain label-refs,
10566 which may be valid only very near the tablejump itself. */
10567 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10568 GET_MODE_SIZE, because this indicates how large insns are. The other
10569 uses should all be Pmode, because they are addresses. This code
10570 could fail if addresses and insns are not the same size. */
10571 index = gen_rtx_PLUS (Pmode,
10572 gen_rtx_MULT (Pmode, index,
10573 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10574 gen_rtx_LABEL_REF (Pmode, table_label));
10575 #ifdef PIC_CASE_VECTOR_ADDRESS
10576 if (flag_pic)
10577 index = PIC_CASE_VECTOR_ADDRESS (index);
10578 else
10579 #endif
10580 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10581 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10582 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10583 RTX_UNCHANGING_P (vector) = 1;
10584 convert_move (temp, vector, 0);
10586 emit_jump_insn (gen_tablejump (temp, table_label));
10588 /* If we are generating PIC code or if the table is PC-relative, the
10589 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10590 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10591 emit_barrier ();
10594 #endif /* HAVE_tablejump */