* expr.c (expand_expr, case ARRAY_REF): Correct check for
[official-gcc.git] / gcc / expr.c
blob1b2a4e66d97092e80bbc73d77b6ae5f447c9e885
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "intl.h"
43 #include "tm_p.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
51 #ifdef PUSH_ROUNDING
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
55 #endif
57 #endif
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since same_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* Don't check memory usage, since code is being emitted to check a memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
176 /* Record for each mode whether we can move a register directly to or
177 from an object of that mode in memory. If we can't, we won't try
178 to use that mode directly when accessing a field of that mode. */
180 static char direct_load[NUM_MACHINE_MODES];
181 static char direct_store[NUM_MACHINE_MODES];
183 /* If a memory-to-memory move would take MOVE_RATIO or more simple
184 move-instruction sequences, we will do a movstr or libcall instead. */
186 #ifndef MOVE_RATIO
187 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
188 #define MOVE_RATIO 2
189 #else
190 /* If we are optimizing for space (-Os), cut down the default move ratio. */
191 #define MOVE_RATIO (optimize_size ? 3 : 15)
192 #endif
193 #endif
195 /* This macro is used to determine whether move_by_pieces should be called
196 to perform a structure copy. */
197 #ifndef MOVE_BY_PIECES_P
198 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
199 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
200 #endif
202 /* This array records the insn_code of insns to perform block moves. */
203 enum insn_code movstr_optab[NUM_MACHINE_MODES];
205 /* This array records the insn_code of insns to perform block clears. */
206 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
208 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
210 #ifndef SLOW_UNALIGNED_ACCESS
211 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
212 #endif
214 /* This is run once per compilation to set up which modes can be used
215 directly in memory and to initialize the block move optab. */
217 void
218 init_expr_once ()
220 rtx insn, pat;
221 enum machine_mode mode;
222 int num_clobbers;
223 rtx mem, mem1;
225 start_sequence ();
227 /* Try indexing by frame ptr and try by stack ptr.
228 It is known that on the Convex the stack ptr isn't a valid index.
229 With luck, one or the other is valid on any machine. */
230 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
231 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
233 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
234 pat = PATTERN (insn);
236 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
237 mode = (enum machine_mode) ((int) mode + 1))
239 int regno;
240 rtx reg;
242 direct_load[(int) mode] = direct_store[(int) mode] = 0;
243 PUT_MODE (mem, mode);
244 PUT_MODE (mem1, mode);
246 /* See if there is some register that can be used in this mode and
247 directly loaded or stored from memory. */
249 if (mode != VOIDmode && mode != BLKmode)
250 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
251 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
252 regno++)
254 if (! HARD_REGNO_MODE_OK (regno, mode))
255 continue;
257 reg = gen_rtx_REG (mode, regno);
259 SET_SRC (pat) = mem;
260 SET_DEST (pat) = reg;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_load[(int) mode] = 1;
264 SET_SRC (pat) = mem1;
265 SET_DEST (pat) = reg;
266 if (recog (pat, insn, &num_clobbers) >= 0)
267 direct_load[(int) mode] = 1;
269 SET_SRC (pat) = reg;
270 SET_DEST (pat) = mem;
271 if (recog (pat, insn, &num_clobbers) >= 0)
272 direct_store[(int) mode] = 1;
274 SET_SRC (pat) = reg;
275 SET_DEST (pat) = mem1;
276 if (recog (pat, insn, &num_clobbers) >= 0)
277 direct_store[(int) mode] = 1;
281 end_sequence ();
284 /* This is run at the start of compiling a function. */
286 void
287 init_expr ()
289 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
291 pending_chain = 0;
292 pending_stack_adjust = 0;
293 stack_pointer_delta = 0;
294 inhibit_defer_pop = 0;
295 saveregs_value = 0;
296 apply_args_value = 0;
297 forced_labels = 0;
300 void
301 mark_expr_status (p)
302 struct expr_status *p;
304 if (p == NULL)
305 return;
307 ggc_mark_rtx (p->x_saveregs_value);
308 ggc_mark_rtx (p->x_apply_args_value);
309 ggc_mark_rtx (p->x_forced_labels);
312 void
313 free_expr_status (f)
314 struct function *f;
316 free (f->expr);
317 f->expr = NULL;
320 /* Small sanity check that the queue is empty at the end of a function. */
322 void
323 finish_expr_for_function ()
325 if (pending_chain)
326 abort ();
329 /* Manage the queue of increment instructions to be output
330 for POSTINCREMENT_EXPR expressions, etc. */
332 /* Queue up to increment (or change) VAR later. BODY says how:
333 BODY should be the same thing you would pass to emit_insn
334 to increment right away. It will go to emit_insn later on.
336 The value is a QUEUED expression to be used in place of VAR
337 where you want to guarantee the pre-incrementation value of VAR. */
339 static rtx
340 enqueue_insn (var, body)
341 rtx var, body;
343 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
344 body, pending_chain);
345 return pending_chain;
348 /* Use protect_from_queue to convert a QUEUED expression
349 into something that you can put immediately into an instruction.
350 If the queued incrementation has not happened yet,
351 protect_from_queue returns the variable itself.
352 If the incrementation has happened, protect_from_queue returns a temp
353 that contains a copy of the old value of the variable.
355 Any time an rtx which might possibly be a QUEUED is to be put
356 into an instruction, it must be passed through protect_from_queue first.
357 QUEUED expressions are not meaningful in instructions.
359 Do not pass a value through protect_from_queue and then hold
360 on to it for a while before putting it in an instruction!
361 If the queue is flushed in between, incorrect code will result. */
364 protect_from_queue (x, modify)
365 register rtx x;
366 int modify;
368 register RTX_CODE code = GET_CODE (x);
370 #if 0 /* A QUEUED can hang around after the queue is forced out. */
371 /* Shortcut for most common case. */
372 if (pending_chain == 0)
373 return x;
374 #endif
376 if (code != QUEUED)
378 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
379 use of autoincrement. Make a copy of the contents of the memory
380 location rather than a copy of the address, but not if the value is
381 of mode BLKmode. Don't modify X in place since it might be
382 shared. */
383 if (code == MEM && GET_MODE (x) != BLKmode
384 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
386 register rtx y = XEXP (x, 0);
387 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
389 MEM_COPY_ATTRIBUTES (new, x);
391 if (QUEUED_INSN (y))
393 register rtx temp = gen_reg_rtx (GET_MODE (new));
394 emit_insn_before (gen_move_insn (temp, new),
395 QUEUED_INSN (y));
396 return temp;
398 /* Copy the address into a pseudo, so that the returned value
399 remains correct across calls to emit_queue. */
400 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
401 return new;
403 /* Otherwise, recursively protect the subexpressions of all
404 the kinds of rtx's that can contain a QUEUED. */
405 if (code == MEM)
407 rtx tem = protect_from_queue (XEXP (x, 0), 0);
408 if (tem != XEXP (x, 0))
410 x = copy_rtx (x);
411 XEXP (x, 0) = tem;
414 else if (code == PLUS || code == MULT)
416 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
417 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
418 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 x = copy_rtx (x);
421 XEXP (x, 0) = new0;
422 XEXP (x, 1) = new1;
425 return x;
427 /* If the increment has not happened, use the variable itself. Copy it
428 into a new pseudo so that the value remains correct across calls to
429 emit_queue. */
430 if (QUEUED_INSN (x) == 0)
431 return copy_to_reg (QUEUED_VAR (x));
432 /* If the increment has happened and a pre-increment copy exists,
433 use that copy. */
434 if (QUEUED_COPY (x) != 0)
435 return QUEUED_COPY (x);
436 /* The increment has happened but we haven't set up a pre-increment copy.
437 Set one up now, and use it. */
438 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
439 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
440 QUEUED_INSN (x));
441 return QUEUED_COPY (x);
444 /* Return nonzero if X contains a QUEUED expression:
445 if it contains anything that will be altered by a queued increment.
446 We handle only combinations of MEM, PLUS, MINUS and MULT operators
447 since memory addresses generally contain only those. */
450 queued_subexp_p (x)
451 rtx x;
453 register enum rtx_code code = GET_CODE (x);
454 switch (code)
456 case QUEUED:
457 return 1;
458 case MEM:
459 return queued_subexp_p (XEXP (x, 0));
460 case MULT:
461 case PLUS:
462 case MINUS:
463 return (queued_subexp_p (XEXP (x, 0))
464 || queued_subexp_p (XEXP (x, 1)));
465 default:
466 return 0;
470 /* Perform all the pending incrementations. */
472 void
473 emit_queue ()
475 register rtx p;
476 while ((p = pending_chain))
478 rtx body = QUEUED_BODY (p);
480 if (GET_CODE (body) == SEQUENCE)
482 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
483 emit_insn (QUEUED_BODY (p));
485 else
486 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
487 pending_chain = QUEUED_NEXT (p);
491 /* Copy data from FROM to TO, where the machine modes are not the same.
492 Both modes may be integer, or both may be floating.
493 UNSIGNEDP should be nonzero if FROM is an unsigned type.
494 This causes zero-extension instead of sign-extension. */
496 void
497 convert_move (to, from, unsignedp)
498 register rtx to, from;
499 int unsignedp;
501 enum machine_mode to_mode = GET_MODE (to);
502 enum machine_mode from_mode = GET_MODE (from);
503 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
504 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
505 enum insn_code code;
506 rtx libcall;
508 /* rtx code for making an equivalent value. */
509 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
511 to = protect_from_queue (to, 1);
512 from = protect_from_queue (from, 0);
514 if (to_real != from_real)
515 abort ();
517 /* If FROM is a SUBREG that indicates that we have already done at least
518 the required extension, strip it. We don't handle such SUBREGs as
519 TO here. */
521 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
522 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
523 >= GET_MODE_SIZE (to_mode))
524 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
525 from = gen_lowpart (to_mode, from), from_mode = to_mode;
527 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
528 abort ();
530 if (to_mode == from_mode
531 || (from_mode == VOIDmode && CONSTANT_P (from)))
533 emit_move_insn (to, from);
534 return;
537 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
539 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
540 abort ();
542 if (VECTOR_MODE_P (to_mode))
543 from = gen_rtx_SUBREG (to_mode, from, 0);
544 else
545 to = gen_rtx_SUBREG (from_mode, to, 0);
547 emit_move_insn (to, from);
548 return;
551 if (to_real != from_real)
552 abort ();
554 if (to_real)
556 rtx value, insns;
558 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
560 /* Try converting directly if the insn is supported. */
561 if ((code = can_extend_p (to_mode, from_mode, 0))
562 != CODE_FOR_nothing)
564 emit_unop_insn (code, to, from, UNKNOWN);
565 return;
569 #ifdef HAVE_trunchfqf2
570 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
572 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
573 return;
575 #endif
576 #ifdef HAVE_trunctqfqf2
577 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
579 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
580 return;
582 #endif
583 #ifdef HAVE_truncsfqf2
584 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
586 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
587 return;
589 #endif
590 #ifdef HAVE_truncdfqf2
591 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
593 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
594 return;
596 #endif
597 #ifdef HAVE_truncxfqf2
598 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
600 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
601 return;
603 #endif
604 #ifdef HAVE_trunctfqf2
605 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
607 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
608 return;
610 #endif
612 #ifdef HAVE_trunctqfhf2
613 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
615 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
616 return;
618 #endif
619 #ifdef HAVE_truncsfhf2
620 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
622 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
623 return;
625 #endif
626 #ifdef HAVE_truncdfhf2
627 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
629 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
630 return;
632 #endif
633 #ifdef HAVE_truncxfhf2
634 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
636 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
637 return;
639 #endif
640 #ifdef HAVE_trunctfhf2
641 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
643 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
644 return;
646 #endif
648 #ifdef HAVE_truncsftqf2
649 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
651 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncdftqf2
656 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
658 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_truncxftqf2
663 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
665 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
666 return;
668 #endif
669 #ifdef HAVE_trunctftqf2
670 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
672 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
673 return;
675 #endif
677 #ifdef HAVE_truncdfsf2
678 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
680 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncxfsf2
685 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
687 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_trunctfsf2
692 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
694 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_truncxfdf2
699 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
701 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
702 return;
704 #endif
705 #ifdef HAVE_trunctfdf2
706 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
708 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
709 return;
711 #endif
713 libcall = (rtx) 0;
714 switch (from_mode)
716 case SFmode:
717 switch (to_mode)
719 case DFmode:
720 libcall = extendsfdf2_libfunc;
721 break;
723 case XFmode:
724 libcall = extendsfxf2_libfunc;
725 break;
727 case TFmode:
728 libcall = extendsftf2_libfunc;
729 break;
731 default:
732 break;
734 break;
736 case DFmode:
737 switch (to_mode)
739 case SFmode:
740 libcall = truncdfsf2_libfunc;
741 break;
743 case XFmode:
744 libcall = extenddfxf2_libfunc;
745 break;
747 case TFmode:
748 libcall = extenddftf2_libfunc;
749 break;
751 default:
752 break;
754 break;
756 case XFmode:
757 switch (to_mode)
759 case SFmode:
760 libcall = truncxfsf2_libfunc;
761 break;
763 case DFmode:
764 libcall = truncxfdf2_libfunc;
765 break;
767 default:
768 break;
770 break;
772 case TFmode:
773 switch (to_mode)
775 case SFmode:
776 libcall = trunctfsf2_libfunc;
777 break;
779 case DFmode:
780 libcall = trunctfdf2_libfunc;
781 break;
783 default:
784 break;
786 break;
788 default:
789 break;
792 if (libcall == (rtx) 0)
793 /* This conversion is not implemented yet. */
794 abort ();
796 start_sequence ();
797 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
798 1, from, from_mode);
799 insns = get_insns ();
800 end_sequence ();
801 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
802 from));
803 return;
806 /* Now both modes are integers. */
808 /* Handle expanding beyond a word. */
809 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
810 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
812 rtx insns;
813 rtx lowpart;
814 rtx fill_value;
815 rtx lowfrom;
816 int i;
817 enum machine_mode lowpart_mode;
818 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
820 /* Try converting directly if the insn is supported. */
821 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
822 != CODE_FOR_nothing)
824 /* If FROM is a SUBREG, put it into a register. Do this
825 so that we always generate the same set of insns for
826 better cse'ing; if an intermediate assignment occurred,
827 we won't be doing the operation directly on the SUBREG. */
828 if (optimize > 0 && GET_CODE (from) == SUBREG)
829 from = force_reg (from_mode, from);
830 emit_unop_insn (code, to, from, equiv_code);
831 return;
833 /* Next, try converting via full word. */
834 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
835 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
836 != CODE_FOR_nothing))
838 if (GET_CODE (to) == REG)
839 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
840 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
841 emit_unop_insn (code, to,
842 gen_lowpart (word_mode, to), equiv_code);
843 return;
846 /* No special multiword conversion insn; do it by hand. */
847 start_sequence ();
849 /* Since we will turn this into a no conflict block, we must ensure
850 that the source does not overlap the target. */
852 if (reg_overlap_mentioned_p (to, from))
853 from = force_reg (from_mode, from);
855 /* Get a copy of FROM widened to a word, if necessary. */
856 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
857 lowpart_mode = word_mode;
858 else
859 lowpart_mode = from_mode;
861 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
863 lowpart = gen_lowpart (lowpart_mode, to);
864 emit_move_insn (lowpart, lowfrom);
866 /* Compute the value to put in each remaining word. */
867 if (unsignedp)
868 fill_value = const0_rtx;
869 else
871 #ifdef HAVE_slt
872 if (HAVE_slt
873 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
874 && STORE_FLAG_VALUE == -1)
876 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
877 lowpart_mode, 0, 0);
878 fill_value = gen_reg_rtx (word_mode);
879 emit_insn (gen_slt (fill_value));
881 else
882 #endif
884 fill_value
885 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
886 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
887 NULL_RTX, 0);
888 fill_value = convert_to_mode (word_mode, fill_value, 1);
892 /* Fill the remaining words. */
893 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
895 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
896 rtx subword = operand_subword (to, index, 1, to_mode);
898 if (subword == 0)
899 abort ();
901 if (fill_value != subword)
902 emit_move_insn (subword, fill_value);
905 insns = get_insns ();
906 end_sequence ();
908 emit_no_conflict_block (insns, to, from, NULL_RTX,
909 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
910 return;
913 /* Truncating multi-word to a word or less. */
914 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
915 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
917 if (!((GET_CODE (from) == MEM
918 && ! MEM_VOLATILE_P (from)
919 && direct_load[(int) to_mode]
920 && ! mode_dependent_address_p (XEXP (from, 0)))
921 || GET_CODE (from) == REG
922 || GET_CODE (from) == SUBREG))
923 from = force_reg (from_mode, from);
924 convert_move (to, gen_lowpart (word_mode, from), 0);
925 return;
928 /* Handle pointer conversion. */ /* SPEE 900220. */
929 if (to_mode == PQImode)
931 if (from_mode != QImode)
932 from = convert_to_mode (QImode, from, unsignedp);
934 #ifdef HAVE_truncqipqi2
935 if (HAVE_truncqipqi2)
937 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
938 return;
940 #endif /* HAVE_truncqipqi2 */
941 abort ();
944 if (from_mode == PQImode)
946 if (to_mode != QImode)
948 from = convert_to_mode (QImode, from, unsignedp);
949 from_mode = QImode;
951 else
953 #ifdef HAVE_extendpqiqi2
954 if (HAVE_extendpqiqi2)
956 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
957 return;
959 #endif /* HAVE_extendpqiqi2 */
960 abort ();
964 if (to_mode == PSImode)
966 if (from_mode != SImode)
967 from = convert_to_mode (SImode, from, unsignedp);
969 #ifdef HAVE_truncsipsi2
970 if (HAVE_truncsipsi2)
972 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
973 return;
975 #endif /* HAVE_truncsipsi2 */
976 abort ();
979 if (from_mode == PSImode)
981 if (to_mode != SImode)
983 from = convert_to_mode (SImode, from, unsignedp);
984 from_mode = SImode;
986 else
988 #ifdef HAVE_extendpsisi2
989 if (! unsignedp && HAVE_extendpsisi2)
991 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
992 return;
994 #endif /* HAVE_extendpsisi2 */
995 #ifdef HAVE_zero_extendpsisi2
996 if (unsignedp && HAVE_zero_extendpsisi2)
998 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
999 return;
1001 #endif /* HAVE_zero_extendpsisi2 */
1002 abort ();
1006 if (to_mode == PDImode)
1008 if (from_mode != DImode)
1009 from = convert_to_mode (DImode, from, unsignedp);
1011 #ifdef HAVE_truncdipdi2
1012 if (HAVE_truncdipdi2)
1014 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_truncdipdi2 */
1018 abort ();
1021 if (from_mode == PDImode)
1023 if (to_mode != DImode)
1025 from = convert_to_mode (DImode, from, unsignedp);
1026 from_mode = DImode;
1028 else
1030 #ifdef HAVE_extendpdidi2
1031 if (HAVE_extendpdidi2)
1033 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1034 return;
1036 #endif /* HAVE_extendpdidi2 */
1037 abort ();
1041 /* Now follow all the conversions between integers
1042 no more than a word long. */
1044 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1045 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1046 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1047 GET_MODE_BITSIZE (from_mode)))
1049 if (!((GET_CODE (from) == MEM
1050 && ! MEM_VOLATILE_P (from)
1051 && direct_load[(int) to_mode]
1052 && ! mode_dependent_address_p (XEXP (from, 0)))
1053 || GET_CODE (from) == REG
1054 || GET_CODE (from) == SUBREG))
1055 from = force_reg (from_mode, from);
1056 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1057 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1058 from = copy_to_reg (from);
1059 emit_move_insn (to, gen_lowpart (to_mode, from));
1060 return;
1063 /* Handle extension. */
1064 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1066 /* Convert directly if that works. */
1067 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1068 != CODE_FOR_nothing)
1070 emit_unop_insn (code, to, from, equiv_code);
1071 return;
1073 else
1075 enum machine_mode intermediate;
1076 rtx tmp;
1077 tree shift_amount;
1079 /* Search for a mode to convert via. */
1080 for (intermediate = from_mode; intermediate != VOIDmode;
1081 intermediate = GET_MODE_WIDER_MODE (intermediate))
1082 if (((can_extend_p (to_mode, intermediate, unsignedp)
1083 != CODE_FOR_nothing)
1084 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1085 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1086 GET_MODE_BITSIZE (intermediate))))
1087 && (can_extend_p (intermediate, from_mode, unsignedp)
1088 != CODE_FOR_nothing))
1090 convert_move (to, convert_to_mode (intermediate, from,
1091 unsignedp), unsignedp);
1092 return;
1095 /* No suitable intermediate mode.
1096 Generate what we need with shifts. */
1097 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1098 - GET_MODE_BITSIZE (from_mode), 0);
1099 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1100 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1101 to, unsignedp);
1102 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1103 to, unsignedp);
1104 if (tmp != to)
1105 emit_move_insn (to, tmp);
1106 return;
1110 /* Support special truncate insns for certain modes. */
1112 if (from_mode == DImode && to_mode == SImode)
1114 #ifdef HAVE_truncdisi2
1115 if (HAVE_truncdisi2)
1117 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1118 return;
1120 #endif
1121 convert_move (to, force_reg (from_mode, from), unsignedp);
1122 return;
1125 if (from_mode == DImode && to_mode == HImode)
1127 #ifdef HAVE_truncdihi2
1128 if (HAVE_truncdihi2)
1130 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1131 return;
1133 #endif
1134 convert_move (to, force_reg (from_mode, from), unsignedp);
1135 return;
1138 if (from_mode == DImode && to_mode == QImode)
1140 #ifdef HAVE_truncdiqi2
1141 if (HAVE_truncdiqi2)
1143 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1144 return;
1146 #endif
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 return;
1151 if (from_mode == SImode && to_mode == HImode)
1153 #ifdef HAVE_truncsihi2
1154 if (HAVE_truncsihi2)
1156 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1157 return;
1159 #endif
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 return;
1164 if (from_mode == SImode && to_mode == QImode)
1166 #ifdef HAVE_truncsiqi2
1167 if (HAVE_truncsiqi2)
1169 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1170 return;
1172 #endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1177 if (from_mode == HImode && to_mode == QImode)
1179 #ifdef HAVE_trunchiqi2
1180 if (HAVE_trunchiqi2)
1182 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1183 return;
1185 #endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1190 if (from_mode == TImode && to_mode == DImode)
1192 #ifdef HAVE_trunctidi2
1193 if (HAVE_trunctidi2)
1195 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1196 return;
1198 #endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1203 if (from_mode == TImode && to_mode == SImode)
1205 #ifdef HAVE_trunctisi2
1206 if (HAVE_trunctisi2)
1208 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1209 return;
1211 #endif
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 return;
1216 if (from_mode == TImode && to_mode == HImode)
1218 #ifdef HAVE_trunctihi2
1219 if (HAVE_trunctihi2)
1221 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1222 return;
1224 #endif
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 return;
1229 if (from_mode == TImode && to_mode == QImode)
1231 #ifdef HAVE_trunctiqi2
1232 if (HAVE_trunctiqi2)
1234 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1235 return;
1237 #endif
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 return;
1242 /* Handle truncation of volatile memrefs, and so on;
1243 the things that couldn't be truncated directly,
1244 and for which there was no special instruction. */
1245 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1247 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1248 emit_move_insn (to, temp);
1249 return;
1252 /* Mode combination is not recognized. */
1253 abort ();
1256 /* Return an rtx for a value that would result
1257 from converting X to mode MODE.
1258 Both X and MODE may be floating, or both integer.
1259 UNSIGNEDP is nonzero if X is an unsigned value.
1260 This can be done by referring to a part of X in place
1261 or by copying to a new temporary with conversion.
1263 This function *must not* call protect_from_queue
1264 except when putting X into an insn (in which case convert_move does it). */
1267 convert_to_mode (mode, x, unsignedp)
1268 enum machine_mode mode;
1269 rtx x;
1270 int unsignedp;
1272 return convert_modes (mode, VOIDmode, x, unsignedp);
1275 /* Return an rtx for a value that would result
1276 from converting X from mode OLDMODE to mode MODE.
1277 Both modes may be floating, or both integer.
1278 UNSIGNEDP is nonzero if X is an unsigned value.
1280 This can be done by referring to a part of X in place
1281 or by copying to a new temporary with conversion.
1283 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1285 This function *must not* call protect_from_queue
1286 except when putting X into an insn (in which case convert_move does it). */
1289 convert_modes (mode, oldmode, x, unsignedp)
1290 enum machine_mode mode, oldmode;
1291 rtx x;
1292 int unsignedp;
1294 register rtx temp;
1296 /* If FROM is a SUBREG that indicates that we have already done at least
1297 the required extension, strip it. */
1299 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1300 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1301 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1302 x = gen_lowpart (mode, x);
1304 if (GET_MODE (x) != VOIDmode)
1305 oldmode = GET_MODE (x);
1307 if (mode == oldmode)
1308 return x;
1310 /* There is one case that we must handle specially: If we are converting
1311 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1312 we are to interpret the constant as unsigned, gen_lowpart will do
1313 the wrong if the constant appears negative. What we want to do is
1314 make the high-order word of the constant zero, not all ones. */
1316 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1317 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1318 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1320 HOST_WIDE_INT val = INTVAL (x);
1322 if (oldmode != VOIDmode
1323 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1325 int width = GET_MODE_BITSIZE (oldmode);
1327 /* We need to zero extend VAL. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1331 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1334 /* We can do this with a gen_lowpart if both desired and current modes
1335 are integer, and this is either a constant integer, a register, or a
1336 non-volatile MEM. Except for the constant case where MODE is no
1337 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1339 if ((GET_CODE (x) == CONST_INT
1340 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1341 || (GET_MODE_CLASS (mode) == MODE_INT
1342 && GET_MODE_CLASS (oldmode) == MODE_INT
1343 && (GET_CODE (x) == CONST_DOUBLE
1344 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1345 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1346 && direct_load[(int) mode])
1347 || (GET_CODE (x) == REG
1348 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1349 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1351 /* ?? If we don't know OLDMODE, we have to assume here that
1352 X does not need sign- or zero-extension. This may not be
1353 the case, but it's the best we can do. */
1354 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1355 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1357 HOST_WIDE_INT val = INTVAL (x);
1358 int width = GET_MODE_BITSIZE (oldmode);
1360 /* We must sign or zero-extend in this case. Start by
1361 zero-extending, then sign extend if we need to. */
1362 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1363 if (! unsignedp
1364 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1365 val |= (HOST_WIDE_INT) (-1) << width;
1367 return GEN_INT (val);
1370 return gen_lowpart (mode, x);
1373 temp = gen_reg_rtx (mode);
1374 convert_move (temp, x, unsignedp);
1375 return temp;
1378 /* This macro is used to determine what the largest unit size that
1379 move_by_pieces can use is. */
1381 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1382 move efficiently, as opposed to MOVE_MAX which is the maximum
1383 number of bytes we can move with a single instruction. */
1385 #ifndef MOVE_MAX_PIECES
1386 #define MOVE_MAX_PIECES MOVE_MAX
1387 #endif
1389 /* Generate several move instructions to copy LEN bytes
1390 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1391 The caller must pass FROM and TO
1392 through protect_from_queue before calling.
1393 ALIGN is maximum alignment we can assume. */
1395 void
1396 move_by_pieces (to, from, len, align)
1397 rtx to, from;
1398 unsigned HOST_WIDE_INT len;
1399 unsigned int align;
1401 struct move_by_pieces data;
1402 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1403 unsigned int max_size = MOVE_MAX_PIECES + 1;
1404 enum machine_mode mode = VOIDmode, tmode;
1405 enum insn_code icode;
1407 data.offset = 0;
1408 data.to_addr = to_addr;
1409 data.from_addr = from_addr;
1410 data.to = to;
1411 data.from = from;
1412 data.autinc_to
1413 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1414 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1415 data.autinc_from
1416 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1417 || GET_CODE (from_addr) == POST_INC
1418 || GET_CODE (from_addr) == POST_DEC);
1420 data.explicit_inc_from = 0;
1421 data.explicit_inc_to = 0;
1422 data.reverse
1423 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 if (data.reverse) data.offset = len;
1425 data.len = len;
1427 /* If copying requires more than two move insns,
1428 copy addresses to registers (to make displacements shorter)
1429 and use post-increment if available. */
1430 if (!(data.autinc_from && data.autinc_to)
1431 && move_by_pieces_ninsns (len, align) > 2)
1433 /* Find the mode of the largest move... */
1434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1435 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1436 if (GET_MODE_SIZE (tmode) < max_size)
1437 mode = tmode;
1439 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1441 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1442 data.autinc_from = 1;
1443 data.explicit_inc_from = -1;
1445 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (from_addr);
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = 1;
1451 if (!data.autinc_from && CONSTANT_P (from_addr))
1452 data.from_addr = copy_addr_to_reg (from_addr);
1453 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1455 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1456 data.autinc_to = 1;
1457 data.explicit_inc_to = -1;
1459 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (to_addr);
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = 1;
1465 if (!data.autinc_to && CONSTANT_P (to_addr))
1466 data.to_addr = copy_addr_to_reg (to_addr);
1469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1471 align = MOVE_MAX * BITS_PER_UNIT;
1473 /* First move what we can in the largest integer mode, then go to
1474 successively smaller modes. */
1476 while (max_size > 1)
1478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1480 if (GET_MODE_SIZE (tmode) < max_size)
1481 mode = tmode;
1483 if (mode == VOIDmode)
1484 break;
1486 icode = mov_optab->handlers[(int) mode].insn_code;
1487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1488 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1490 max_size = GET_MODE_SIZE (mode);
1493 /* The code above should have handled everything. */
1494 if (data.len > 0)
1495 abort ();
1498 /* Return number of insns required to move L bytes by pieces.
1499 ALIGN (in bits) is maximum alignment we can assume. */
1501 static unsigned HOST_WIDE_INT
1502 move_by_pieces_ninsns (l, align)
1503 unsigned HOST_WIDE_INT l;
1504 unsigned int align;
1506 unsigned HOST_WIDE_INT n_insns = 0;
1507 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1509 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1510 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1511 align = MOVE_MAX * BITS_PER_UNIT;
1513 while (max_size > 1)
1515 enum machine_mode mode = VOIDmode, tmode;
1516 enum insn_code icode;
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1521 mode = tmode;
1523 if (mode == VOIDmode)
1524 break;
1526 icode = mov_optab->handlers[(int) mode].insn_code;
1527 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1528 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1530 max_size = GET_MODE_SIZE (mode);
1533 if (l)
1534 abort ();
1535 return n_insns;
1538 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1539 with move instructions for mode MODE. GENFUN is the gen_... function
1540 to make a move insn for that mode. DATA has all the other info. */
1542 static void
1543 move_by_pieces_1 (genfun, mode, data)
1544 rtx (*genfun) PARAMS ((rtx, ...));
1545 enum machine_mode mode;
1546 struct move_by_pieces *data;
1548 unsigned int size = GET_MODE_SIZE (mode);
1549 rtx to1, from1;
1551 while (data->len >= size)
1553 if (data->reverse)
1554 data->offset -= size;
1556 if (data->autinc_to)
1558 to1 = gen_rtx_MEM (mode, data->to_addr);
1559 MEM_COPY_ATTRIBUTES (to1, data->to);
1561 else
1562 to1 = change_address (data->to, mode,
1563 plus_constant (data->to_addr, data->offset));
1565 if (data->autinc_from)
1567 from1 = gen_rtx_MEM (mode, data->from_addr);
1568 MEM_COPY_ATTRIBUTES (from1, data->from);
1570 else
1571 from1 = change_address (data->from, mode,
1572 plus_constant (data->from_addr, data->offset));
1574 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1575 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1576 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1577 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1579 emit_insn ((*genfun) (to1, from1));
1581 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1582 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1583 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1584 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1586 if (! data->reverse)
1587 data->offset += size;
1589 data->len -= size;
1593 /* Emit code to move a block Y to a block X.
1594 This may be done with string-move instructions,
1595 with multiple scalar move instructions, or with a library call.
1597 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1598 with mode BLKmode.
1599 SIZE is an rtx that says how long they are.
1600 ALIGN is the maximum alignment we can assume they have.
1602 Return the address of the new block, if memcpy is called and returns it,
1603 0 otherwise. */
1606 emit_block_move (x, y, size, align)
1607 rtx x, y;
1608 rtx size;
1609 unsigned int align;
1611 rtx retval = 0;
1612 #ifdef TARGET_MEM_FUNCTIONS
1613 static tree fn;
1614 tree call_expr, arg_list;
1615 #endif
1617 if (GET_MODE (x) != BLKmode)
1618 abort ();
1620 if (GET_MODE (y) != BLKmode)
1621 abort ();
1623 x = protect_from_queue (x, 1);
1624 y = protect_from_queue (y, 0);
1625 size = protect_from_queue (size, 0);
1627 if (GET_CODE (x) != MEM)
1628 abort ();
1629 if (GET_CODE (y) != MEM)
1630 abort ();
1631 if (size == 0)
1632 abort ();
1634 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1635 move_by_pieces (x, y, INTVAL (size), align);
1636 else
1638 /* Try the most limited insn first, because there's no point
1639 including more than one in the machine description unless
1640 the more limited one has some advantage. */
1642 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1643 enum machine_mode mode;
1645 /* Since this is a move insn, we don't care about volatility. */
1646 volatile_ok = 1;
1648 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1649 mode = GET_MODE_WIDER_MODE (mode))
1651 enum insn_code code = movstr_optab[(int) mode];
1652 insn_operand_predicate_fn pred;
1654 if (code != CODE_FOR_nothing
1655 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1656 here because if SIZE is less than the mode mask, as it is
1657 returned by the macro, it will definitely be less than the
1658 actual mode mask. */
1659 && ((GET_CODE (size) == CONST_INT
1660 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1661 <= (GET_MODE_MASK (mode) >> 1)))
1662 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1663 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1664 || (*pred) (x, BLKmode))
1665 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1666 || (*pred) (y, BLKmode))
1667 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1668 || (*pred) (opalign, VOIDmode)))
1670 rtx op2;
1671 rtx last = get_last_insn ();
1672 rtx pat;
1674 op2 = convert_to_mode (mode, size, 1);
1675 pred = insn_data[(int) code].operand[2].predicate;
1676 if (pred != 0 && ! (*pred) (op2, mode))
1677 op2 = copy_to_mode_reg (mode, op2);
1679 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1680 if (pat)
1682 emit_insn (pat);
1683 volatile_ok = 0;
1684 return 0;
1686 else
1687 delete_insns_since (last);
1691 volatile_ok = 0;
1693 /* X, Y, or SIZE may have been passed through protect_from_queue.
1695 It is unsafe to save the value generated by protect_from_queue
1696 and reuse it later. Consider what happens if emit_queue is
1697 called before the return value from protect_from_queue is used.
1699 Expansion of the CALL_EXPR below will call emit_queue before
1700 we are finished emitting RTL for argument setup. So if we are
1701 not careful we could get the wrong value for an argument.
1703 To avoid this problem we go ahead and emit code to copy X, Y &
1704 SIZE into new pseudos. We can then place those new pseudos
1705 into an RTL_EXPR and use them later, even after a call to
1706 emit_queue.
1708 Note this is not strictly needed for library calls since they
1709 do not call emit_queue before loading their arguments. However,
1710 we may need to have library calls call emit_queue in the future
1711 since failing to do so could cause problems for targets which
1712 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1713 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1714 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1716 #ifdef TARGET_MEM_FUNCTIONS
1717 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1718 #else
1719 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1720 TREE_UNSIGNED (integer_type_node));
1721 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1722 #endif
1724 #ifdef TARGET_MEM_FUNCTIONS
1725 /* It is incorrect to use the libcall calling conventions to call
1726 memcpy in this context.
1728 This could be a user call to memcpy and the user may wish to
1729 examine the return value from memcpy.
1731 For targets where libcalls and normal calls have different conventions
1732 for returning pointers, we could end up generating incorrect code.
1734 So instead of using a libcall sequence we build up a suitable
1735 CALL_EXPR and expand the call in the normal fashion. */
1736 if (fn == NULL_TREE)
1738 tree fntype;
1740 /* This was copied from except.c, I don't know if all this is
1741 necessary in this context or not. */
1742 fn = get_identifier ("memcpy");
1743 fntype = build_pointer_type (void_type_node);
1744 fntype = build_function_type (fntype, NULL_TREE);
1745 fn = build_decl (FUNCTION_DECL, fn, fntype);
1746 ggc_add_tree_root (&fn, 1);
1747 DECL_EXTERNAL (fn) = 1;
1748 TREE_PUBLIC (fn) = 1;
1749 DECL_ARTIFICIAL (fn) = 1;
1750 TREE_NOTHROW (fn) = 1;
1751 make_decl_rtl (fn, NULL_PTR);
1752 assemble_external (fn);
1755 /* We need to make an argument list for the function call.
1757 memcpy has three arguments, the first two are void * addresses and
1758 the last is a size_t byte count for the copy. */
1759 arg_list
1760 = build_tree_list (NULL_TREE,
1761 make_tree (build_pointer_type (void_type_node), x));
1762 TREE_CHAIN (arg_list)
1763 = build_tree_list (NULL_TREE,
1764 make_tree (build_pointer_type (void_type_node), y));
1765 TREE_CHAIN (TREE_CHAIN (arg_list))
1766 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1767 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1769 /* Now we have to build up the CALL_EXPR itself. */
1770 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1771 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1772 call_expr, arg_list, NULL_TREE);
1773 TREE_SIDE_EFFECTS (call_expr) = 1;
1775 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1776 #else
1777 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1778 VOIDmode, 3, y, Pmode, x, Pmode,
1779 convert_to_mode (TYPE_MODE (integer_type_node), size,
1780 TREE_UNSIGNED (integer_type_node)),
1781 TYPE_MODE (integer_type_node));
1782 #endif
1785 return retval;
1788 /* Copy all or part of a value X into registers starting at REGNO.
1789 The number of registers to be filled is NREGS. */
1791 void
1792 move_block_to_reg (regno, x, nregs, mode)
1793 int regno;
1794 rtx x;
1795 int nregs;
1796 enum machine_mode mode;
1798 int i;
1799 #ifdef HAVE_load_multiple
1800 rtx pat;
1801 rtx last;
1802 #endif
1804 if (nregs == 0)
1805 return;
1807 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1808 x = validize_mem (force_const_mem (mode, x));
1810 /* See if the machine can do this with a load multiple insn. */
1811 #ifdef HAVE_load_multiple
1812 if (HAVE_load_multiple)
1814 last = get_last_insn ();
1815 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1816 GEN_INT (nregs));
1817 if (pat)
1819 emit_insn (pat);
1820 return;
1822 else
1823 delete_insns_since (last);
1825 #endif
1827 for (i = 0; i < nregs; i++)
1828 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1829 operand_subword_force (x, i, mode));
1832 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1833 The number of registers to be filled is NREGS. SIZE indicates the number
1834 of bytes in the object X. */
1836 void
1837 move_block_from_reg (regno, x, nregs, size)
1838 int regno;
1839 rtx x;
1840 int nregs;
1841 int size;
1843 int i;
1844 #ifdef HAVE_store_multiple
1845 rtx pat;
1846 rtx last;
1847 #endif
1848 enum machine_mode mode;
1850 if (nregs == 0)
1851 return;
1853 /* If SIZE is that of a mode no bigger than a word, just use that
1854 mode's store operation. */
1855 if (size <= UNITS_PER_WORD
1856 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1858 emit_move_insn (change_address (x, mode, NULL),
1859 gen_rtx_REG (mode, regno));
1860 return;
1863 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1864 to the left before storing to memory. Note that the previous test
1865 doesn't handle all cases (e.g. SIZE == 3). */
1866 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1868 rtx tem = operand_subword (x, 0, 1, BLKmode);
1869 rtx shift;
1871 if (tem == 0)
1872 abort ();
1874 shift = expand_shift (LSHIFT_EXPR, word_mode,
1875 gen_rtx_REG (word_mode, regno),
1876 build_int_2 ((UNITS_PER_WORD - size)
1877 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1878 emit_move_insn (tem, shift);
1879 return;
1882 /* See if the machine can do this with a store multiple insn. */
1883 #ifdef HAVE_store_multiple
1884 if (HAVE_store_multiple)
1886 last = get_last_insn ();
1887 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1888 GEN_INT (nregs));
1889 if (pat)
1891 emit_insn (pat);
1892 return;
1894 else
1895 delete_insns_since (last);
1897 #endif
1899 for (i = 0; i < nregs; i++)
1901 rtx tem = operand_subword (x, i, 1, BLKmode);
1903 if (tem == 0)
1904 abort ();
1906 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1910 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1911 registers represented by a PARALLEL. SSIZE represents the total size of
1912 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1913 SRC in bits. */
1914 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1915 the balance will be in what would be the low-order memory addresses, i.e.
1916 left justified for big endian, right justified for little endian. This
1917 happens to be true for the targets currently using this support. If this
1918 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1919 would be needed. */
1921 void
1922 emit_group_load (dst, orig_src, ssize, align)
1923 rtx dst, orig_src;
1924 unsigned int align;
1925 int ssize;
1927 rtx *tmps, src;
1928 int start, i;
1930 if (GET_CODE (dst) != PARALLEL)
1931 abort ();
1933 /* Check for a NULL entry, used to indicate that the parameter goes
1934 both on the stack and in registers. */
1935 if (XEXP (XVECEXP (dst, 0, 0), 0))
1936 start = 0;
1937 else
1938 start = 1;
1940 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1942 /* Process the pieces. */
1943 for (i = start; i < XVECLEN (dst, 0); i++)
1945 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1946 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1947 unsigned int bytelen = GET_MODE_SIZE (mode);
1948 int shift = 0;
1950 /* Handle trailing fragments that run over the size of the struct. */
1951 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1953 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1954 bytelen = ssize - bytepos;
1955 if (bytelen <= 0)
1956 abort ();
1959 /* If we won't be loading directly from memory, protect the real source
1960 from strange tricks we might play; but make sure that the source can
1961 be loaded directly into the destination. */
1962 src = orig_src;
1963 if (GET_CODE (orig_src) != MEM
1964 && (!CONSTANT_P (orig_src)
1965 || (GET_MODE (orig_src) != mode
1966 && GET_MODE (orig_src) != VOIDmode)))
1968 if (GET_MODE (orig_src) == VOIDmode)
1969 src = gen_reg_rtx (mode);
1970 else
1971 src = gen_reg_rtx (GET_MODE (orig_src));
1972 emit_move_insn (src, orig_src);
1975 /* Optimize the access just a bit. */
1976 if (GET_CODE (src) == MEM
1977 && align >= GET_MODE_ALIGNMENT (mode)
1978 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1979 && bytelen == GET_MODE_SIZE (mode))
1981 tmps[i] = gen_reg_rtx (mode);
1982 emit_move_insn (tmps[i],
1983 change_address (src, mode,
1984 plus_constant (XEXP (src, 0),
1985 bytepos)));
1987 else if (GET_CODE (src) == CONCAT)
1989 if (bytepos == 0
1990 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1991 tmps[i] = XEXP (src, 0);
1992 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1993 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1994 tmps[i] = XEXP (src, 1);
1995 else
1996 abort ();
1998 else if (CONSTANT_P (src)
1999 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2000 tmps[i] = src;
2001 else
2002 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2003 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2004 mode, mode, align, ssize);
2006 if (BYTES_BIG_ENDIAN && shift)
2007 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2008 tmps[i], 0, OPTAB_WIDEN);
2011 emit_queue ();
2013 /* Copy the extracted pieces into the proper (probable) hard regs. */
2014 for (i = start; i < XVECLEN (dst, 0); i++)
2015 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2018 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2019 registers represented by a PARALLEL. SSIZE represents the total size of
2020 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2022 void
2023 emit_group_store (orig_dst, src, ssize, align)
2024 rtx orig_dst, src;
2025 int ssize;
2026 unsigned int align;
2028 rtx *tmps, dst;
2029 int start, i;
2031 if (GET_CODE (src) != PARALLEL)
2032 abort ();
2034 /* Check for a NULL entry, used to indicate that the parameter goes
2035 both on the stack and in registers. */
2036 if (XEXP (XVECEXP (src, 0, 0), 0))
2037 start = 0;
2038 else
2039 start = 1;
2041 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2043 /* Copy the (probable) hard regs into pseudos. */
2044 for (i = start; i < XVECLEN (src, 0); i++)
2046 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2047 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2048 emit_move_insn (tmps[i], reg);
2050 emit_queue ();
2052 /* If we won't be storing directly into memory, protect the real destination
2053 from strange tricks we might play. */
2054 dst = orig_dst;
2055 if (GET_CODE (dst) == PARALLEL)
2057 rtx temp;
2059 /* We can get a PARALLEL dst if there is a conditional expression in
2060 a return statement. In that case, the dst and src are the same,
2061 so no action is necessary. */
2062 if (rtx_equal_p (dst, src))
2063 return;
2065 /* It is unclear if we can ever reach here, but we may as well handle
2066 it. Allocate a temporary, and split this into a store/load to/from
2067 the temporary. */
2069 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2070 emit_group_store (temp, src, ssize, align);
2071 emit_group_load (dst, temp, ssize, align);
2072 return;
2074 else if (GET_CODE (dst) != MEM)
2076 dst = gen_reg_rtx (GET_MODE (orig_dst));
2077 /* Make life a bit easier for combine. */
2078 emit_move_insn (dst, const0_rtx);
2081 /* Process the pieces. */
2082 for (i = start; i < XVECLEN (src, 0); i++)
2084 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2085 enum machine_mode mode = GET_MODE (tmps[i]);
2086 unsigned int bytelen = GET_MODE_SIZE (mode);
2088 /* Handle trailing fragments that run over the size of the struct. */
2089 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2091 if (BYTES_BIG_ENDIAN)
2093 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2094 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2095 tmps[i], 0, OPTAB_WIDEN);
2097 bytelen = ssize - bytepos;
2100 /* Optimize the access just a bit. */
2101 if (GET_CODE (dst) == MEM
2102 && align >= GET_MODE_ALIGNMENT (mode)
2103 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2104 && bytelen == GET_MODE_SIZE (mode))
2105 emit_move_insn (change_address (dst, mode,
2106 plus_constant (XEXP (dst, 0),
2107 bytepos)),
2108 tmps[i]);
2109 else
2110 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2111 mode, tmps[i], align, ssize);
2114 emit_queue ();
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (GET_CODE (dst) == REG)
2118 emit_move_insn (orig_dst, dst);
2121 /* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2125 The primary purpose of this routine is to handle functions
2126 that return BLKmode structures in registers. Some machines
2127 (the PA for example) want to return all small structures
2128 in registers regardless of the structure's alignment. */
2131 copy_blkmode_from_reg (tgtblk, srcreg, type)
2132 rtx tgtblk;
2133 rtx srcreg;
2134 tree type;
2136 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2137 rtx src = NULL, dst = NULL;
2138 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2139 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2141 if (tgtblk == 0)
2143 tgtblk = assign_temp (build_qualified_type (type,
2144 (TYPE_QUALS (type)
2145 | TYPE_QUAL_CONST)),
2146 0, 1, 1);
2147 preserve_temp_slots (tgtblk);
2150 /* This code assumes srcreg is at least a full word. If it isn't,
2151 copy it into a new pseudo which is a full word. */
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2156 /* Structures whose size is not a multiple of a word are aligned
2157 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2158 machine, this means we must skip the empty high order bytes when
2159 calculating the bit offset. */
2160 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2161 big_endian_correction
2162 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2164 /* Copy the structure BITSIZE bites at a time.
2166 We could probably emit more efficient code for machines which do not use
2167 strict alignment, but it doesn't seem worth the effort at the current
2168 time. */
2169 for (bitpos = 0, xbitpos = big_endian_correction;
2170 bitpos < bytes * BITS_PER_UNIT;
2171 bitpos += bitsize, xbitpos += bitsize)
2173 /* We need a new source operand each time xbitpos is on a
2174 word boundary and when xbitpos == big_endian_correction
2175 (the first time through). */
2176 if (xbitpos % BITS_PER_WORD == 0
2177 || xbitpos == big_endian_correction)
2178 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2180 /* We need a new destination operand each time bitpos is on
2181 a word boundary. */
2182 if (bitpos % BITS_PER_WORD == 0)
2183 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2185 /* Use xbitpos for the source extraction (right justified) and
2186 xbitpos for the destination store (left justified). */
2187 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2188 extract_bit_field (src, bitsize,
2189 xbitpos % BITS_PER_WORD, 1,
2190 NULL_RTX, word_mode, word_mode,
2191 bitsize, BITS_PER_WORD),
2192 bitsize, BITS_PER_WORD);
2195 return tgtblk;
2198 /* Add a USE expression for REG to the (possibly empty) list pointed
2199 to by CALL_FUSAGE. REG must denote a hard register. */
2201 void
2202 use_reg (call_fusage, reg)
2203 rtx *call_fusage, reg;
2205 if (GET_CODE (reg) != REG
2206 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2207 abort ();
2209 *call_fusage
2210 = gen_rtx_EXPR_LIST (VOIDmode,
2211 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2214 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2215 starting at REGNO. All of these registers must be hard registers. */
2217 void
2218 use_regs (call_fusage, regno, nregs)
2219 rtx *call_fusage;
2220 int regno;
2221 int nregs;
2223 int i;
2225 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2226 abort ();
2228 for (i = 0; i < nregs; i++)
2229 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2232 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2233 PARALLEL REGS. This is for calls that pass values in multiple
2234 non-contiguous locations. The Irix 6 ABI has examples of this. */
2236 void
2237 use_group_regs (call_fusage, regs)
2238 rtx *call_fusage;
2239 rtx regs;
2241 int i;
2243 for (i = 0; i < XVECLEN (regs, 0); i++)
2245 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2247 /* A NULL entry means the parameter goes both on the stack and in
2248 registers. This can also be a MEM for targets that pass values
2249 partially on the stack and partially in registers. */
2250 if (reg != 0 && GET_CODE (reg) == REG)
2251 use_reg (call_fusage, reg);
2257 can_store_by_pieces (len, constfun, constfundata, align)
2258 unsigned HOST_WIDE_INT len;
2259 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2260 PTR constfundata;
2261 unsigned int align;
2263 unsigned HOST_WIDE_INT max_size, l;
2264 HOST_WIDE_INT offset = 0;
2265 enum machine_mode mode, tmode;
2266 enum insn_code icode;
2267 int reverse;
2268 rtx cst;
2270 if (! MOVE_BY_PIECES_P (len, align))
2271 return 0;
2273 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2274 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2275 align = MOVE_MAX * BITS_PER_UNIT;
2277 /* We would first store what we can in the largest integer mode, then go to
2278 successively smaller modes. */
2280 for (reverse = 0;
2281 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2282 reverse++)
2284 l = len;
2285 mode = VOIDmode;
2286 max_size = MOVE_MAX_PIECES + 1;
2287 while (max_size > 1)
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2294 if (mode == VOIDmode)
2295 break;
2297 icode = mov_optab->handlers[(int) mode].insn_code;
2298 if (icode != CODE_FOR_nothing
2299 && align >= GET_MODE_ALIGNMENT (mode))
2301 unsigned int size = GET_MODE_SIZE (mode);
2303 while (l >= size)
2305 if (reverse)
2306 offset -= size;
2308 cst = (*constfun) (constfundata, offset, mode);
2309 if (!LEGITIMATE_CONSTANT_P (cst))
2310 return 0;
2312 if (!reverse)
2313 offset += size;
2315 l -= size;
2319 max_size = GET_MODE_SIZE (mode);
2322 /* The code above should have handled everything. */
2323 if (l != 0)
2324 abort ();
2327 return 1;
2330 /* Generate several move instructions to store LEN bytes generated by
2331 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2332 pointer which will be passed as argument in every CONSTFUN call.
2333 ALIGN is maximum alignment we can assume. */
2335 void
2336 store_by_pieces (to, len, constfun, constfundata, align)
2337 rtx to;
2338 unsigned HOST_WIDE_INT len;
2339 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2340 PTR constfundata;
2341 unsigned int align;
2343 struct store_by_pieces data;
2345 if (! MOVE_BY_PIECES_P (len, align))
2346 abort ();
2347 to = protect_from_queue (to, 1);
2348 data.constfun = constfun;
2349 data.constfundata = constfundata;
2350 data.len = len;
2351 data.to = to;
2352 store_by_pieces_1 (&data, align);
2355 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2356 rtx with BLKmode). The caller must pass TO through protect_from_queue
2357 before calling. ALIGN is maximum alignment we can assume. */
2359 static void
2360 clear_by_pieces (to, len, align)
2361 rtx to;
2362 unsigned HOST_WIDE_INT len;
2363 unsigned int align;
2365 struct store_by_pieces data;
2367 data.constfun = clear_by_pieces_1;
2368 data.constfundata = NULL_PTR;
2369 data.len = len;
2370 data.to = to;
2371 store_by_pieces_1 (&data, align);
2374 /* Callback routine for clear_by_pieces.
2375 Return const0_rtx unconditionally. */
2377 static rtx
2378 clear_by_pieces_1 (data, offset, mode)
2379 PTR data ATTRIBUTE_UNUSED;
2380 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2381 enum machine_mode mode ATTRIBUTE_UNUSED;
2383 return const0_rtx;
2386 /* Subroutine of clear_by_pieces and store_by_pieces.
2387 Generate several move instructions to store LEN bytes of block TO. (A MEM
2388 rtx with BLKmode). The caller must pass TO through protect_from_queue
2389 before calling. ALIGN is maximum alignment we can assume. */
2391 static void
2392 store_by_pieces_1 (data, align)
2393 struct store_by_pieces *data;
2394 unsigned int align;
2396 rtx to_addr = XEXP (data->to, 0);
2397 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2398 enum machine_mode mode = VOIDmode, tmode;
2399 enum insn_code icode;
2401 data->offset = 0;
2402 data->to_addr = to_addr;
2403 data->autinc_to
2404 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2405 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2407 data->explicit_inc_to = 0;
2408 data->reverse
2409 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2410 if (data->reverse)
2411 data->offset = data->len;
2413 /* If storing requires more than two move insns,
2414 copy addresses to registers (to make displacements shorter)
2415 and use post-increment if available. */
2416 if (!data->autinc_to
2417 && move_by_pieces_ninsns (data->len, align) > 2)
2419 /* Determine the main mode we'll be using. */
2420 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2421 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2422 if (GET_MODE_SIZE (tmode) < max_size)
2423 mode = tmode;
2425 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2427 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2428 data->autinc_to = 1;
2429 data->explicit_inc_to = -1;
2432 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2433 && ! data->autinc_to)
2435 data->to_addr = copy_addr_to_reg (to_addr);
2436 data->autinc_to = 1;
2437 data->explicit_inc_to = 1;
2440 if ( !data->autinc_to && CONSTANT_P (to_addr))
2441 data->to_addr = copy_addr_to_reg (to_addr);
2444 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2445 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2446 align = MOVE_MAX * BITS_PER_UNIT;
2448 /* First store what we can in the largest integer mode, then go to
2449 successively smaller modes. */
2451 while (max_size > 1)
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2456 mode = tmode;
2458 if (mode == VOIDmode)
2459 break;
2461 icode = mov_optab->handlers[(int) mode].insn_code;
2462 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2463 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2465 max_size = GET_MODE_SIZE (mode);
2468 /* The code above should have handled everything. */
2469 if (data->len != 0)
2470 abort ();
2473 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2474 with move instructions for mode MODE. GENFUN is the gen_... function
2475 to make a move insn for that mode. DATA has all the other info. */
2477 static void
2478 store_by_pieces_2 (genfun, mode, data)
2479 rtx (*genfun) PARAMS ((rtx, ...));
2480 enum machine_mode mode;
2481 struct store_by_pieces *data;
2483 unsigned int size = GET_MODE_SIZE (mode);
2484 rtx to1, cst;
2486 while (data->len >= size)
2488 if (data->reverse)
2489 data->offset -= size;
2491 if (data->autinc_to)
2493 to1 = gen_rtx_MEM (mode, data->to_addr);
2494 MEM_COPY_ATTRIBUTES (to1, data->to);
2496 else
2497 to1 = change_address (data->to, mode,
2498 plus_constant (data->to_addr, data->offset));
2500 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2501 emit_insn (gen_add2_insn (data->to_addr,
2502 GEN_INT (-(HOST_WIDE_INT) size)));
2504 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2505 emit_insn ((*genfun) (to1, cst));
2507 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2508 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2510 if (! data->reverse)
2511 data->offset += size;
2513 data->len -= size;
2517 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2518 its length in bytes and ALIGN is the maximum alignment we can is has.
2520 If we call a function that returns the length of the block, return it. */
2523 clear_storage (object, size, align)
2524 rtx object;
2525 rtx size;
2526 unsigned int align;
2528 #ifdef TARGET_MEM_FUNCTIONS
2529 static tree fn;
2530 tree call_expr, arg_list;
2531 #endif
2532 rtx retval = 0;
2534 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2535 just move a zero. Otherwise, do this a piece at a time. */
2536 if (GET_MODE (object) != BLKmode
2537 && GET_CODE (size) == CONST_INT
2538 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2539 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2540 else
2542 object = protect_from_queue (object, 1);
2543 size = protect_from_queue (size, 0);
2545 if (GET_CODE (size) == CONST_INT
2546 && MOVE_BY_PIECES_P (INTVAL (size), align))
2547 clear_by_pieces (object, INTVAL (size), align);
2548 else
2550 /* Try the most limited insn first, because there's no point
2551 including more than one in the machine description unless
2552 the more limited one has some advantage. */
2554 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2555 enum machine_mode mode;
2557 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2558 mode = GET_MODE_WIDER_MODE (mode))
2560 enum insn_code code = clrstr_optab[(int) mode];
2561 insn_operand_predicate_fn pred;
2563 if (code != CODE_FOR_nothing
2564 /* We don't need MODE to be narrower than
2565 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2566 the mode mask, as it is returned by the macro, it will
2567 definitely be less than the actual mode mask. */
2568 && ((GET_CODE (size) == CONST_INT
2569 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2570 <= (GET_MODE_MASK (mode) >> 1)))
2571 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2572 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2573 || (*pred) (object, BLKmode))
2574 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2575 || (*pred) (opalign, VOIDmode)))
2577 rtx op1;
2578 rtx last = get_last_insn ();
2579 rtx pat;
2581 op1 = convert_to_mode (mode, size, 1);
2582 pred = insn_data[(int) code].operand[1].predicate;
2583 if (pred != 0 && ! (*pred) (op1, mode))
2584 op1 = copy_to_mode_reg (mode, op1);
2586 pat = GEN_FCN ((int) code) (object, op1, opalign);
2587 if (pat)
2589 emit_insn (pat);
2590 return 0;
2592 else
2593 delete_insns_since (last);
2597 /* OBJECT or SIZE may have been passed through protect_from_queue.
2599 It is unsafe to save the value generated by protect_from_queue
2600 and reuse it later. Consider what happens if emit_queue is
2601 called before the return value from protect_from_queue is used.
2603 Expansion of the CALL_EXPR below will call emit_queue before
2604 we are finished emitting RTL for argument setup. So if we are
2605 not careful we could get the wrong value for an argument.
2607 To avoid this problem we go ahead and emit code to copy OBJECT
2608 and SIZE into new pseudos. We can then place those new pseudos
2609 into an RTL_EXPR and use them later, even after a call to
2610 emit_queue.
2612 Note this is not strictly needed for library calls since they
2613 do not call emit_queue before loading their arguments. However,
2614 we may need to have library calls call emit_queue in the future
2615 since failing to do so could cause problems for targets which
2616 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2617 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2619 #ifdef TARGET_MEM_FUNCTIONS
2620 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2621 #else
2622 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2623 TREE_UNSIGNED (integer_type_node));
2624 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2625 #endif
2627 #ifdef TARGET_MEM_FUNCTIONS
2628 /* It is incorrect to use the libcall calling conventions to call
2629 memset in this context.
2631 This could be a user call to memset and the user may wish to
2632 examine the return value from memset.
2634 For targets where libcalls and normal calls have different
2635 conventions for returning pointers, we could end up generating
2636 incorrect code.
2638 So instead of using a libcall sequence we build up a suitable
2639 CALL_EXPR and expand the call in the normal fashion. */
2640 if (fn == NULL_TREE)
2642 tree fntype;
2644 /* This was copied from except.c, I don't know if all this is
2645 necessary in this context or not. */
2646 fn = get_identifier ("memset");
2647 fntype = build_pointer_type (void_type_node);
2648 fntype = build_function_type (fntype, NULL_TREE);
2649 fn = build_decl (FUNCTION_DECL, fn, fntype);
2650 ggc_add_tree_root (&fn, 1);
2651 DECL_EXTERNAL (fn) = 1;
2652 TREE_PUBLIC (fn) = 1;
2653 DECL_ARTIFICIAL (fn) = 1;
2654 TREE_NOTHROW (fn) = 1;
2655 make_decl_rtl (fn, NULL_PTR);
2656 assemble_external (fn);
2659 /* We need to make an argument list for the function call.
2661 memset has three arguments, the first is a void * addresses, the
2662 second a integer with the initialization value, the last is a
2663 size_t byte count for the copy. */
2664 arg_list
2665 = build_tree_list (NULL_TREE,
2666 make_tree (build_pointer_type (void_type_node),
2667 object));
2668 TREE_CHAIN (arg_list)
2669 = build_tree_list (NULL_TREE,
2670 make_tree (integer_type_node, const0_rtx));
2671 TREE_CHAIN (TREE_CHAIN (arg_list))
2672 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2673 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2675 /* Now we have to build up the CALL_EXPR itself. */
2676 call_expr = build1 (ADDR_EXPR,
2677 build_pointer_type (TREE_TYPE (fn)), fn);
2678 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2679 call_expr, arg_list, NULL_TREE);
2680 TREE_SIDE_EFFECTS (call_expr) = 1;
2682 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2683 #else
2684 emit_library_call (bzero_libfunc, LCT_NORMAL,
2685 VOIDmode, 2, object, Pmode, size,
2686 TYPE_MODE (integer_type_node));
2687 #endif
2691 return retval;
2694 /* Generate code to copy Y into X.
2695 Both Y and X must have the same mode, except that
2696 Y can be a constant with VOIDmode.
2697 This mode cannot be BLKmode; use emit_block_move for that.
2699 Return the last instruction emitted. */
2702 emit_move_insn (x, y)
2703 rtx x, y;
2705 enum machine_mode mode = GET_MODE (x);
2706 rtx y_cst = NULL_RTX;
2707 rtx last_insn;
2709 x = protect_from_queue (x, 1);
2710 y = protect_from_queue (y, 0);
2712 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2713 abort ();
2715 /* Never force constant_p_rtx to memory. */
2716 if (GET_CODE (y) == CONSTANT_P_RTX)
2718 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2720 y_cst = y;
2721 y = force_const_mem (mode, y);
2724 /* If X or Y are memory references, verify that their addresses are valid
2725 for the machine. */
2726 if (GET_CODE (x) == MEM
2727 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2728 && ! push_operand (x, GET_MODE (x)))
2729 || (flag_force_addr
2730 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2731 x = change_address (x, VOIDmode, XEXP (x, 0));
2733 if (GET_CODE (y) == MEM
2734 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2735 || (flag_force_addr
2736 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2737 y = change_address (y, VOIDmode, XEXP (y, 0));
2739 if (mode == BLKmode)
2740 abort ();
2742 last_insn = emit_move_insn_1 (x, y);
2744 if (y_cst && GET_CODE (x) == REG)
2745 REG_NOTES (last_insn)
2746 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2748 return last_insn;
2751 /* Low level part of emit_move_insn.
2752 Called just like emit_move_insn, but assumes X and Y
2753 are basically valid. */
2756 emit_move_insn_1 (x, y)
2757 rtx x, y;
2759 enum machine_mode mode = GET_MODE (x);
2760 enum machine_mode submode;
2761 enum mode_class class = GET_MODE_CLASS (mode);
2762 unsigned int i;
2764 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2765 abort ();
2767 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2768 return
2769 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2771 /* Expand complex moves by moving real part and imag part, if possible. */
2772 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2773 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2774 * BITS_PER_UNIT),
2775 (class == MODE_COMPLEX_INT
2776 ? MODE_INT : MODE_FLOAT),
2778 && (mov_optab->handlers[(int) submode].insn_code
2779 != CODE_FOR_nothing))
2781 /* Don't split destination if it is a stack push. */
2782 int stack = push_operand (x, GET_MODE (x));
2784 /* If this is a stack, push the highpart first, so it
2785 will be in the argument order.
2787 In that case, change_address is used only to convert
2788 the mode, not to change the address. */
2789 if (stack)
2791 /* Note that the real part always precedes the imag part in memory
2792 regardless of machine's endianness. */
2793 #ifdef STACK_GROWS_DOWNWARD
2794 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2795 (gen_rtx_MEM (submode, XEXP (x, 0)),
2796 gen_imagpart (submode, y)));
2797 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2798 (gen_rtx_MEM (submode, XEXP (x, 0)),
2799 gen_realpart (submode, y)));
2800 #else
2801 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2802 (gen_rtx_MEM (submode, XEXP (x, 0)),
2803 gen_realpart (submode, y)));
2804 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2805 (gen_rtx_MEM (submode, XEXP (x, 0)),
2806 gen_imagpart (submode, y)));
2807 #endif
2809 else
2811 rtx realpart_x, realpart_y;
2812 rtx imagpart_x, imagpart_y;
2814 /* If this is a complex value with each part being smaller than a
2815 word, the usual calling sequence will likely pack the pieces into
2816 a single register. Unfortunately, SUBREG of hard registers only
2817 deals in terms of words, so we have a problem converting input
2818 arguments to the CONCAT of two registers that is used elsewhere
2819 for complex values. If this is before reload, we can copy it into
2820 memory and reload. FIXME, we should see about using extract and
2821 insert on integer registers, but complex short and complex char
2822 variables should be rarely used. */
2823 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2824 && (reload_in_progress | reload_completed) == 0)
2826 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2827 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2829 if (packed_dest_p || packed_src_p)
2831 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2832 ? MODE_FLOAT : MODE_INT);
2834 enum machine_mode reg_mode
2835 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2837 if (reg_mode != BLKmode)
2839 rtx mem = assign_stack_temp (reg_mode,
2840 GET_MODE_SIZE (mode), 0);
2841 rtx cmem = change_address (mem, mode, NULL_RTX);
2843 cfun->cannot_inline
2844 = N_("function using short complex types cannot be inline");
2846 if (packed_dest_p)
2848 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2849 emit_move_insn_1 (cmem, y);
2850 return emit_move_insn_1 (sreg, mem);
2852 else
2854 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2855 emit_move_insn_1 (mem, sreg);
2856 return emit_move_insn_1 (x, cmem);
2862 realpart_x = gen_realpart (submode, x);
2863 realpart_y = gen_realpart (submode, y);
2864 imagpart_x = gen_imagpart (submode, x);
2865 imagpart_y = gen_imagpart (submode, y);
2867 /* Show the output dies here. This is necessary for SUBREGs
2868 of pseudos since we cannot track their lifetimes correctly;
2869 hard regs shouldn't appear here except as return values.
2870 We never want to emit such a clobber after reload. */
2871 if (x != y
2872 && ! (reload_in_progress || reload_completed)
2873 && (GET_CODE (realpart_x) == SUBREG
2874 || GET_CODE (imagpart_x) == SUBREG))
2876 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (realpart_x, realpart_y));
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (imagpart_x, imagpart_y));
2885 return get_last_insn ();
2888 /* This will handle any multi-word mode that lacks a move_insn pattern.
2889 However, you will get better code if you define such patterns,
2890 even if they must turn into multiple assembler instructions. */
2891 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2893 rtx last_insn = 0;
2894 rtx seq, inner;
2895 int need_clobber;
2897 #ifdef PUSH_ROUNDING
2899 /* If X is a push on the stack, do the push now and replace
2900 X with a reference to the stack pointer. */
2901 if (push_operand (x, GET_MODE (x)))
2903 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2904 x = change_address (x, VOIDmode, stack_pointer_rtx);
2906 #endif
2908 /* If we are in reload, see if either operand is a MEM whose address
2909 is scheduled for replacement. */
2910 if (reload_in_progress && GET_CODE (x) == MEM
2911 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2913 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2915 MEM_COPY_ATTRIBUTES (new, x);
2916 x = new;
2918 if (reload_in_progress && GET_CODE (y) == MEM
2919 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2921 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2923 MEM_COPY_ATTRIBUTES (new, y);
2924 y = new;
2927 start_sequence ();
2929 need_clobber = 0;
2930 for (i = 0;
2931 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2932 i++)
2934 rtx xpart = operand_subword (x, i, 1, mode);
2935 rtx ypart = operand_subword (y, i, 1, mode);
2937 /* If we can't get a part of Y, put Y into memory if it is a
2938 constant. Otherwise, force it into a register. If we still
2939 can't get a part of Y, abort. */
2940 if (ypart == 0 && CONSTANT_P (y))
2942 y = force_const_mem (mode, y);
2943 ypart = operand_subword (y, i, 1, mode);
2945 else if (ypart == 0)
2946 ypart = operand_subword_force (y, i, mode);
2948 if (xpart == 0 || ypart == 0)
2949 abort ();
2951 need_clobber |= (GET_CODE (xpart) == SUBREG);
2953 last_insn = emit_move_insn (xpart, ypart);
2956 seq = gen_sequence ();
2957 end_sequence ();
2959 /* Show the output dies here. This is necessary for SUBREGs
2960 of pseudos since we cannot track their lifetimes correctly;
2961 hard regs shouldn't appear here except as return values.
2962 We never want to emit such a clobber after reload. */
2963 if (x != y
2964 && ! (reload_in_progress || reload_completed)
2965 && need_clobber != 0)
2967 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2970 emit_insn (seq);
2972 return last_insn;
2974 else
2975 abort ();
2978 /* Pushing data onto the stack. */
2980 /* Push a block of length SIZE (perhaps variable)
2981 and return an rtx to address the beginning of the block.
2982 Note that it is not possible for the value returned to be a QUEUED.
2983 The value may be virtual_outgoing_args_rtx.
2985 EXTRA is the number of bytes of padding to push in addition to SIZE.
2986 BELOW nonzero means this padding comes at low addresses;
2987 otherwise, the padding comes at high addresses. */
2990 push_block (size, extra, below)
2991 rtx size;
2992 int extra, below;
2994 register rtx temp;
2996 size = convert_modes (Pmode, ptr_mode, size, 1);
2997 if (CONSTANT_P (size))
2998 anti_adjust_stack (plus_constant (size, extra));
2999 else if (GET_CODE (size) == REG && extra == 0)
3000 anti_adjust_stack (size);
3001 else
3003 temp = copy_to_mode_reg (Pmode, size);
3004 if (extra != 0)
3005 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3006 temp, 0, OPTAB_LIB_WIDEN);
3007 anti_adjust_stack (temp);
3010 #ifndef STACK_GROWS_DOWNWARD
3011 #ifdef ARGS_GROW_DOWNWARD
3012 if (!ACCUMULATE_OUTGOING_ARGS)
3013 #else
3014 if (0)
3015 #endif
3016 #else
3017 if (1)
3018 #endif
3020 /* Return the lowest stack address when STACK or ARGS grow downward and
3021 we are not aaccumulating outgoing arguments (the c4x port uses such
3022 conventions). */
3023 temp = virtual_outgoing_args_rtx;
3024 if (extra != 0 && below)
3025 temp = plus_constant (temp, extra);
3027 else
3029 if (GET_CODE (size) == CONST_INT)
3030 temp = plus_constant (virtual_outgoing_args_rtx,
3031 -INTVAL (size) - (below ? 0 : extra));
3032 else if (extra != 0 && !below)
3033 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3034 negate_rtx (Pmode, plus_constant (size, extra)));
3035 else
3036 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3037 negate_rtx (Pmode, size));
3040 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3044 gen_push_operand ()
3046 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3049 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
3050 block of SIZE bytes. */
3052 static rtx
3053 get_push_address (size)
3054 int size;
3056 register rtx temp;
3058 if (STACK_PUSH_CODE == POST_DEC)
3059 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3060 else if (STACK_PUSH_CODE == POST_INC)
3061 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3062 else
3063 temp = stack_pointer_rtx;
3065 return copy_to_reg (temp);
3068 /* Generate code to push X onto the stack, assuming it has mode MODE and
3069 type TYPE.
3070 MODE is redundant except when X is a CONST_INT (since they don't
3071 carry mode info).
3072 SIZE is an rtx for the size of data to be copied (in bytes),
3073 needed only if X is BLKmode.
3075 ALIGN (in bits) is maximum alignment we can assume.
3077 If PARTIAL and REG are both nonzero, then copy that many of the first
3078 words of X into registers starting with REG, and push the rest of X.
3079 The amount of space pushed is decreased by PARTIAL words,
3080 rounded *down* to a multiple of PARM_BOUNDARY.
3081 REG must be a hard register in this case.
3082 If REG is zero but PARTIAL is not, take any all others actions for an
3083 argument partially in registers, but do not actually load any
3084 registers.
3086 EXTRA is the amount in bytes of extra space to leave next to this arg.
3087 This is ignored if an argument block has already been allocated.
3089 On a machine that lacks real push insns, ARGS_ADDR is the address of
3090 the bottom of the argument block for this call. We use indexing off there
3091 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3092 argument block has not been preallocated.
3094 ARGS_SO_FAR is the size of args previously pushed for this call.
3096 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3097 for arguments passed in registers. If nonzero, it will be the number
3098 of bytes required. */
3100 void
3101 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3102 args_addr, args_so_far, reg_parm_stack_space,
3103 alignment_pad)
3104 register rtx x;
3105 enum machine_mode mode;
3106 tree type;
3107 rtx size;
3108 unsigned int align;
3109 int partial;
3110 rtx reg;
3111 int extra;
3112 rtx args_addr;
3113 rtx args_so_far;
3114 int reg_parm_stack_space;
3115 rtx alignment_pad;
3117 rtx xinner;
3118 enum direction stack_direction
3119 #ifdef STACK_GROWS_DOWNWARD
3120 = downward;
3121 #else
3122 = upward;
3123 #endif
3125 /* Decide where to pad the argument: `downward' for below,
3126 `upward' for above, or `none' for don't pad it.
3127 Default is below for small data on big-endian machines; else above. */
3128 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3130 /* Invert direction if stack is post-update. */
3131 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3132 if (where_pad != none)
3133 where_pad = (where_pad == downward ? upward : downward);
3135 xinner = x = protect_from_queue (x, 0);
3137 if (mode == BLKmode)
3139 /* Copy a block into the stack, entirely or partially. */
3141 register rtx temp;
3142 int used = partial * UNITS_PER_WORD;
3143 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3144 int skip;
3146 if (size == 0)
3147 abort ();
3149 used -= offset;
3151 /* USED is now the # of bytes we need not copy to the stack
3152 because registers will take care of them. */
3154 if (partial != 0)
3155 xinner = change_address (xinner, BLKmode,
3156 plus_constant (XEXP (xinner, 0), used));
3158 /* If the partial register-part of the arg counts in its stack size,
3159 skip the part of stack space corresponding to the registers.
3160 Otherwise, start copying to the beginning of the stack space,
3161 by setting SKIP to 0. */
3162 skip = (reg_parm_stack_space == 0) ? 0 : used;
3164 #ifdef PUSH_ROUNDING
3165 /* Do it with several push insns if that doesn't take lots of insns
3166 and if there is no difficulty with push insns that skip bytes
3167 on the stack for alignment purposes. */
3168 if (args_addr == 0
3169 && PUSH_ARGS
3170 && GET_CODE (size) == CONST_INT
3171 && skip == 0
3172 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3173 /* Here we avoid the case of a structure whose weak alignment
3174 forces many pushes of a small amount of data,
3175 and such small pushes do rounding that causes trouble. */
3176 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3177 || align >= BIGGEST_ALIGNMENT
3178 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3179 == (align / BITS_PER_UNIT)))
3180 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3182 /* Push padding now if padding above and stack grows down,
3183 or if padding below and stack grows up.
3184 But if space already allocated, this has already been done. */
3185 if (extra && args_addr == 0
3186 && where_pad != none && where_pad != stack_direction)
3187 anti_adjust_stack (GEN_INT (extra));
3189 stack_pointer_delta += INTVAL (size) - used;
3190 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3191 INTVAL (size) - used, align);
3193 if (current_function_check_memory_usage && ! in_check_memory_usage)
3195 rtx temp;
3197 in_check_memory_usage = 1;
3198 temp = get_push_address (INTVAL (size) - used);
3199 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3200 emit_library_call (chkr_copy_bitmap_libfunc,
3201 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3202 Pmode, XEXP (xinner, 0), Pmode,
3203 GEN_INT (INTVAL (size) - used),
3204 TYPE_MODE (sizetype));
3205 else
3206 emit_library_call (chkr_set_right_libfunc,
3207 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3208 Pmode, GEN_INT (INTVAL (size) - used),
3209 TYPE_MODE (sizetype),
3210 GEN_INT (MEMORY_USE_RW),
3211 TYPE_MODE (integer_type_node));
3212 in_check_memory_usage = 0;
3215 else
3216 #endif /* PUSH_ROUNDING */
3218 rtx target;
3220 /* Otherwise make space on the stack and copy the data
3221 to the address of that space. */
3223 /* Deduct words put into registers from the size we must copy. */
3224 if (partial != 0)
3226 if (GET_CODE (size) == CONST_INT)
3227 size = GEN_INT (INTVAL (size) - used);
3228 else
3229 size = expand_binop (GET_MODE (size), sub_optab, size,
3230 GEN_INT (used), NULL_RTX, 0,
3231 OPTAB_LIB_WIDEN);
3234 /* Get the address of the stack space.
3235 In this case, we do not deal with EXTRA separately.
3236 A single stack adjust will do. */
3237 if (! args_addr)
3239 temp = push_block (size, extra, where_pad == downward);
3240 extra = 0;
3242 else if (GET_CODE (args_so_far) == CONST_INT)
3243 temp = memory_address (BLKmode,
3244 plus_constant (args_addr,
3245 skip + INTVAL (args_so_far)));
3246 else
3247 temp = memory_address (BLKmode,
3248 plus_constant (gen_rtx_PLUS (Pmode,
3249 args_addr,
3250 args_so_far),
3251 skip));
3252 if (current_function_check_memory_usage && ! in_check_memory_usage)
3254 in_check_memory_usage = 1;
3255 target = copy_to_reg (temp);
3256 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3257 emit_library_call (chkr_copy_bitmap_libfunc,
3258 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3259 target, Pmode,
3260 XEXP (xinner, 0), Pmode,
3261 size, TYPE_MODE (sizetype));
3262 else
3263 emit_library_call (chkr_set_right_libfunc,
3264 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3265 target, Pmode,
3266 size, TYPE_MODE (sizetype),
3267 GEN_INT (MEMORY_USE_RW),
3268 TYPE_MODE (integer_type_node));
3269 in_check_memory_usage = 0;
3272 target = gen_rtx_MEM (BLKmode, temp);
3274 if (type != 0)
3276 set_mem_attributes (target, type, 1);
3277 /* Function incoming arguments may overlap with sibling call
3278 outgoing arguments and we cannot allow reordering of reads
3279 from function arguments with stores to outgoing arguments
3280 of sibling calls. */
3281 MEM_ALIAS_SET (target) = 0;
3284 /* TEMP is the address of the block. Copy the data there. */
3285 if (GET_CODE (size) == CONST_INT
3286 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3288 move_by_pieces (target, xinner, INTVAL (size), align);
3289 goto ret;
3291 else
3293 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3294 enum machine_mode mode;
3296 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3297 mode != VOIDmode;
3298 mode = GET_MODE_WIDER_MODE (mode))
3300 enum insn_code code = movstr_optab[(int) mode];
3301 insn_operand_predicate_fn pred;
3303 if (code != CODE_FOR_nothing
3304 && ((GET_CODE (size) == CONST_INT
3305 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3306 <= (GET_MODE_MASK (mode) >> 1)))
3307 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3308 && (!(pred = insn_data[(int) code].operand[0].predicate)
3309 || ((*pred) (target, BLKmode)))
3310 && (!(pred = insn_data[(int) code].operand[1].predicate)
3311 || ((*pred) (xinner, BLKmode)))
3312 && (!(pred = insn_data[(int) code].operand[3].predicate)
3313 || ((*pred) (opalign, VOIDmode))))
3315 rtx op2 = convert_to_mode (mode, size, 1);
3316 rtx last = get_last_insn ();
3317 rtx pat;
3319 pred = insn_data[(int) code].operand[2].predicate;
3320 if (pred != 0 && ! (*pred) (op2, mode))
3321 op2 = copy_to_mode_reg (mode, op2);
3323 pat = GEN_FCN ((int) code) (target, xinner,
3324 op2, opalign);
3325 if (pat)
3327 emit_insn (pat);
3328 goto ret;
3330 else
3331 delete_insns_since (last);
3336 if (!ACCUMULATE_OUTGOING_ARGS)
3338 /* If the source is referenced relative to the stack pointer,
3339 copy it to another register to stabilize it. We do not need
3340 to do this if we know that we won't be changing sp. */
3342 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3343 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3344 temp = copy_to_reg (temp);
3347 /* Make inhibit_defer_pop nonzero around the library call
3348 to force it to pop the bcopy-arguments right away. */
3349 NO_DEFER_POP;
3350 #ifdef TARGET_MEM_FUNCTIONS
3351 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3352 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3353 convert_to_mode (TYPE_MODE (sizetype),
3354 size, TREE_UNSIGNED (sizetype)),
3355 TYPE_MODE (sizetype));
3356 #else
3357 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3358 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3359 convert_to_mode (TYPE_MODE (integer_type_node),
3360 size,
3361 TREE_UNSIGNED (integer_type_node)),
3362 TYPE_MODE (integer_type_node));
3363 #endif
3364 OK_DEFER_POP;
3367 else if (partial > 0)
3369 /* Scalar partly in registers. */
3371 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3372 int i;
3373 int not_stack;
3374 /* # words of start of argument
3375 that we must make space for but need not store. */
3376 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3377 int args_offset = INTVAL (args_so_far);
3378 int skip;
3380 /* Push padding now if padding above and stack grows down,
3381 or if padding below and stack grows up.
3382 But if space already allocated, this has already been done. */
3383 if (extra && args_addr == 0
3384 && where_pad != none && where_pad != stack_direction)
3385 anti_adjust_stack (GEN_INT (extra));
3387 /* If we make space by pushing it, we might as well push
3388 the real data. Otherwise, we can leave OFFSET nonzero
3389 and leave the space uninitialized. */
3390 if (args_addr == 0)
3391 offset = 0;
3393 /* Now NOT_STACK gets the number of words that we don't need to
3394 allocate on the stack. */
3395 not_stack = partial - offset;
3397 /* If the partial register-part of the arg counts in its stack size,
3398 skip the part of stack space corresponding to the registers.
3399 Otherwise, start copying to the beginning of the stack space,
3400 by setting SKIP to 0. */
3401 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3403 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3404 x = validize_mem (force_const_mem (mode, x));
3406 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3407 SUBREGs of such registers are not allowed. */
3408 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3409 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3410 x = copy_to_reg (x);
3412 /* Loop over all the words allocated on the stack for this arg. */
3413 /* We can do it by words, because any scalar bigger than a word
3414 has a size a multiple of a word. */
3415 #ifndef PUSH_ARGS_REVERSED
3416 for (i = not_stack; i < size; i++)
3417 #else
3418 for (i = size - 1; i >= not_stack; i--)
3419 #endif
3420 if (i >= not_stack + offset)
3421 emit_push_insn (operand_subword_force (x, i, mode),
3422 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3423 0, args_addr,
3424 GEN_INT (args_offset + ((i - not_stack + skip)
3425 * UNITS_PER_WORD)),
3426 reg_parm_stack_space, alignment_pad);
3428 else
3430 rtx addr;
3431 rtx target = NULL_RTX;
3432 rtx dest;
3434 /* Push padding now if padding above and stack grows down,
3435 or if padding below and stack grows up.
3436 But if space already allocated, this has already been done. */
3437 if (extra && args_addr == 0
3438 && where_pad != none && where_pad != stack_direction)
3439 anti_adjust_stack (GEN_INT (extra));
3441 #ifdef PUSH_ROUNDING
3442 if (args_addr == 0 && PUSH_ARGS)
3444 addr = gen_push_operand ();
3445 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3447 else
3448 #endif
3450 if (GET_CODE (args_so_far) == CONST_INT)
3451 addr
3452 = memory_address (mode,
3453 plus_constant (args_addr,
3454 INTVAL (args_so_far)));
3455 else
3456 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3457 args_so_far));
3458 target = addr;
3461 dest = gen_rtx_MEM (mode, addr);
3462 if (type != 0)
3464 set_mem_attributes (dest, type, 1);
3465 /* Function incoming arguments may overlap with sibling call
3466 outgoing arguments and we cannot allow reordering of reads
3467 from function arguments with stores to outgoing arguments
3468 of sibling calls. */
3469 MEM_ALIAS_SET (dest) = 0;
3472 emit_move_insn (dest, x);
3474 if (current_function_check_memory_usage && ! in_check_memory_usage)
3476 in_check_memory_usage = 1;
3477 if (target == 0)
3478 target = get_push_address (GET_MODE_SIZE (mode));
3480 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3481 emit_library_call (chkr_copy_bitmap_libfunc,
3482 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3483 Pmode, XEXP (x, 0), Pmode,
3484 GEN_INT (GET_MODE_SIZE (mode)),
3485 TYPE_MODE (sizetype));
3486 else
3487 emit_library_call (chkr_set_right_libfunc,
3488 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3489 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3490 TYPE_MODE (sizetype),
3491 GEN_INT (MEMORY_USE_RW),
3492 TYPE_MODE (integer_type_node));
3493 in_check_memory_usage = 0;
3497 ret:
3498 /* If part should go in registers, copy that part
3499 into the appropriate registers. Do this now, at the end,
3500 since mem-to-mem copies above may do function calls. */
3501 if (partial > 0 && reg != 0)
3503 /* Handle calls that pass values in multiple non-contiguous locations.
3504 The Irix 6 ABI has examples of this. */
3505 if (GET_CODE (reg) == PARALLEL)
3506 emit_group_load (reg, x, -1, align); /* ??? size? */
3507 else
3508 move_block_to_reg (REGNO (reg), x, partial, mode);
3511 if (extra && args_addr == 0 && where_pad == stack_direction)
3512 anti_adjust_stack (GEN_INT (extra));
3514 if (alignment_pad && args_addr == 0)
3515 anti_adjust_stack (alignment_pad);
3518 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3519 operations. */
3521 static rtx
3522 get_subtarget (x)
3523 rtx x;
3525 return ((x == 0
3526 /* Only registers can be subtargets. */
3527 || GET_CODE (x) != REG
3528 /* If the register is readonly, it can't be set more than once. */
3529 || RTX_UNCHANGING_P (x)
3530 /* Don't use hard regs to avoid extending their life. */
3531 || REGNO (x) < FIRST_PSEUDO_REGISTER
3532 /* Avoid subtargets inside loops,
3533 since they hide some invariant expressions. */
3534 || preserve_subexpressions_p ())
3535 ? 0 : x);
3538 /* Expand an assignment that stores the value of FROM into TO.
3539 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3540 (This may contain a QUEUED rtx;
3541 if the value is constant, this rtx is a constant.)
3542 Otherwise, the returned value is NULL_RTX.
3544 SUGGEST_REG is no longer actually used.
3545 It used to mean, copy the value through a register
3546 and return that register, if that is possible.
3547 We now use WANT_VALUE to decide whether to do this. */
3550 expand_assignment (to, from, want_value, suggest_reg)
3551 tree to, from;
3552 int want_value;
3553 int suggest_reg ATTRIBUTE_UNUSED;
3555 register rtx to_rtx = 0;
3556 rtx result;
3558 /* Don't crash if the lhs of the assignment was erroneous. */
3560 if (TREE_CODE (to) == ERROR_MARK)
3562 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3563 return want_value ? result : NULL_RTX;
3566 /* Assignment of a structure component needs special treatment
3567 if the structure component's rtx is not simply a MEM.
3568 Assignment of an array element at a constant index, and assignment of
3569 an array element in an unaligned packed structure field, has the same
3570 problem. */
3572 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3573 || TREE_CODE (to) == ARRAY_REF)
3575 enum machine_mode mode1;
3576 HOST_WIDE_INT bitsize, bitpos;
3577 tree offset;
3578 int unsignedp;
3579 int volatilep = 0;
3580 tree tem;
3581 unsigned int alignment;
3583 push_temp_slots ();
3584 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3585 &unsignedp, &volatilep, &alignment);
3587 /* If we are going to use store_bit_field and extract_bit_field,
3588 make sure to_rtx will be safe for multiple use. */
3590 if (mode1 == VOIDmode && want_value)
3591 tem = stabilize_reference (tem);
3593 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3594 if (offset != 0)
3596 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3598 if (GET_CODE (to_rtx) != MEM)
3599 abort ();
3601 if (GET_MODE (offset_rtx) != ptr_mode)
3603 #ifdef POINTERS_EXTEND_UNSIGNED
3604 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3605 #else
3606 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3607 #endif
3610 /* A constant address in TO_RTX can have VOIDmode, we must not try
3611 to call force_reg for that case. Avoid that case. */
3612 if (GET_CODE (to_rtx) == MEM
3613 && GET_MODE (to_rtx) == BLKmode
3614 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3615 && bitsize
3616 && (bitpos % bitsize) == 0
3617 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3618 && alignment == GET_MODE_ALIGNMENT (mode1))
3620 rtx temp = change_address (to_rtx, mode1,
3621 plus_constant (XEXP (to_rtx, 0),
3622 (bitpos /
3623 BITS_PER_UNIT)));
3624 if (GET_CODE (XEXP (temp, 0)) == REG)
3625 to_rtx = temp;
3626 else
3627 to_rtx = change_address (to_rtx, mode1,
3628 force_reg (GET_MODE (XEXP (temp, 0)),
3629 XEXP (temp, 0)));
3630 bitpos = 0;
3633 to_rtx = change_address (to_rtx, VOIDmode,
3634 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3635 force_reg (ptr_mode,
3636 offset_rtx)));
3639 if (volatilep)
3641 if (GET_CODE (to_rtx) == MEM)
3643 /* When the offset is zero, to_rtx is the address of the
3644 structure we are storing into, and hence may be shared.
3645 We must make a new MEM before setting the volatile bit. */
3646 if (offset == 0)
3647 to_rtx = copy_rtx (to_rtx);
3649 MEM_VOLATILE_P (to_rtx) = 1;
3651 #if 0 /* This was turned off because, when a field is volatile
3652 in an object which is not volatile, the object may be in a register,
3653 and then we would abort over here. */
3654 else
3655 abort ();
3656 #endif
3659 if (TREE_CODE (to) == COMPONENT_REF
3660 && TREE_READONLY (TREE_OPERAND (to, 1)))
3662 if (offset == 0)
3663 to_rtx = copy_rtx (to_rtx);
3665 RTX_UNCHANGING_P (to_rtx) = 1;
3668 /* Check the access. */
3669 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3671 rtx to_addr;
3672 int size;
3673 int best_mode_size;
3674 enum machine_mode best_mode;
3676 best_mode = get_best_mode (bitsize, bitpos,
3677 TYPE_ALIGN (TREE_TYPE (tem)),
3678 mode1, volatilep);
3679 if (best_mode == VOIDmode)
3680 best_mode = QImode;
3682 best_mode_size = GET_MODE_BITSIZE (best_mode);
3683 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3684 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3685 size *= GET_MODE_SIZE (best_mode);
3687 /* Check the access right of the pointer. */
3688 in_check_memory_usage = 1;
3689 if (size)
3690 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3691 VOIDmode, 3, to_addr, Pmode,
3692 GEN_INT (size), TYPE_MODE (sizetype),
3693 GEN_INT (MEMORY_USE_WO),
3694 TYPE_MODE (integer_type_node));
3695 in_check_memory_usage = 0;
3698 /* If this is a varying-length object, we must get the address of
3699 the source and do an explicit block move. */
3700 if (bitsize < 0)
3702 unsigned int from_align;
3703 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3704 rtx inner_to_rtx
3705 = change_address (to_rtx, VOIDmode,
3706 plus_constant (XEXP (to_rtx, 0),
3707 bitpos / BITS_PER_UNIT));
3709 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3710 MIN (alignment, from_align));
3711 free_temp_slots ();
3712 pop_temp_slots ();
3713 return to_rtx;
3715 else
3717 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3718 (want_value
3719 /* Spurious cast for HPUX compiler. */
3720 ? ((enum machine_mode)
3721 TYPE_MODE (TREE_TYPE (to)))
3722 : VOIDmode),
3723 unsignedp,
3724 alignment,
3725 int_size_in_bytes (TREE_TYPE (tem)),
3726 get_alias_set (to));
3728 preserve_temp_slots (result);
3729 free_temp_slots ();
3730 pop_temp_slots ();
3732 /* If the value is meaningful, convert RESULT to the proper mode.
3733 Otherwise, return nothing. */
3734 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3735 TYPE_MODE (TREE_TYPE (from)),
3736 result,
3737 TREE_UNSIGNED (TREE_TYPE (to)))
3738 : NULL_RTX);
3742 /* If the rhs is a function call and its value is not an aggregate,
3743 call the function before we start to compute the lhs.
3744 This is needed for correct code for cases such as
3745 val = setjmp (buf) on machines where reference to val
3746 requires loading up part of an address in a separate insn.
3748 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3749 since it might be a promoted variable where the zero- or sign- extension
3750 needs to be done. Handling this in the normal way is safe because no
3751 computation is done before the call. */
3752 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3753 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3754 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3755 && GET_CODE (DECL_RTL (to)) == REG))
3757 rtx value;
3759 push_temp_slots ();
3760 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3761 if (to_rtx == 0)
3762 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3764 /* Handle calls that return values in multiple non-contiguous locations.
3765 The Irix 6 ABI has examples of this. */
3766 if (GET_CODE (to_rtx) == PARALLEL)
3767 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3768 TYPE_ALIGN (TREE_TYPE (from)));
3769 else if (GET_MODE (to_rtx) == BLKmode)
3770 emit_block_move (to_rtx, value, expr_size (from),
3771 TYPE_ALIGN (TREE_TYPE (from)));
3772 else
3774 #ifdef POINTERS_EXTEND_UNSIGNED
3775 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3776 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3777 value = convert_memory_address (GET_MODE (to_rtx), value);
3778 #endif
3779 emit_move_insn (to_rtx, value);
3781 preserve_temp_slots (to_rtx);
3782 free_temp_slots ();
3783 pop_temp_slots ();
3784 return want_value ? to_rtx : NULL_RTX;
3787 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3788 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3790 if (to_rtx == 0)
3792 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3793 if (GET_CODE (to_rtx) == MEM)
3794 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3797 /* Don't move directly into a return register. */
3798 if (TREE_CODE (to) == RESULT_DECL
3799 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3801 rtx temp;
3803 push_temp_slots ();
3804 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3806 if (GET_CODE (to_rtx) == PARALLEL)
3807 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3808 TYPE_ALIGN (TREE_TYPE (from)));
3809 else
3810 emit_move_insn (to_rtx, temp);
3812 preserve_temp_slots (to_rtx);
3813 free_temp_slots ();
3814 pop_temp_slots ();
3815 return want_value ? to_rtx : NULL_RTX;
3818 /* In case we are returning the contents of an object which overlaps
3819 the place the value is being stored, use a safe function when copying
3820 a value through a pointer into a structure value return block. */
3821 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3822 && current_function_returns_struct
3823 && !current_function_returns_pcc_struct)
3825 rtx from_rtx, size;
3827 push_temp_slots ();
3828 size = expr_size (from);
3829 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3830 EXPAND_MEMORY_USE_DONT);
3832 /* Copy the rights of the bitmap. */
3833 if (current_function_check_memory_usage)
3834 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3835 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3836 XEXP (from_rtx, 0), Pmode,
3837 convert_to_mode (TYPE_MODE (sizetype),
3838 size, TREE_UNSIGNED (sizetype)),
3839 TYPE_MODE (sizetype));
3841 #ifdef TARGET_MEM_FUNCTIONS
3842 emit_library_call (memmove_libfunc, LCT_NORMAL,
3843 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3844 XEXP (from_rtx, 0), Pmode,
3845 convert_to_mode (TYPE_MODE (sizetype),
3846 size, TREE_UNSIGNED (sizetype)),
3847 TYPE_MODE (sizetype));
3848 #else
3849 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3850 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3851 XEXP (to_rtx, 0), Pmode,
3852 convert_to_mode (TYPE_MODE (integer_type_node),
3853 size, TREE_UNSIGNED (integer_type_node)),
3854 TYPE_MODE (integer_type_node));
3855 #endif
3857 preserve_temp_slots (to_rtx);
3858 free_temp_slots ();
3859 pop_temp_slots ();
3860 return want_value ? to_rtx : NULL_RTX;
3863 /* Compute FROM and store the value in the rtx we got. */
3865 push_temp_slots ();
3866 result = store_expr (from, to_rtx, want_value);
3867 preserve_temp_slots (result);
3868 free_temp_slots ();
3869 pop_temp_slots ();
3870 return want_value ? result : NULL_RTX;
3873 /* Generate code for computing expression EXP,
3874 and storing the value into TARGET.
3875 TARGET may contain a QUEUED rtx.
3877 If WANT_VALUE is nonzero, return a copy of the value
3878 not in TARGET, so that we can be sure to use the proper
3879 value in a containing expression even if TARGET has something
3880 else stored in it. If possible, we copy the value through a pseudo
3881 and return that pseudo. Or, if the value is constant, we try to
3882 return the constant. In some cases, we return a pseudo
3883 copied *from* TARGET.
3885 If the mode is BLKmode then we may return TARGET itself.
3886 It turns out that in BLKmode it doesn't cause a problem.
3887 because C has no operators that could combine two different
3888 assignments into the same BLKmode object with different values
3889 with no sequence point. Will other languages need this to
3890 be more thorough?
3892 If WANT_VALUE is 0, we return NULL, to make sure
3893 to catch quickly any cases where the caller uses the value
3894 and fails to set WANT_VALUE. */
3897 store_expr (exp, target, want_value)
3898 register tree exp;
3899 register rtx target;
3900 int want_value;
3902 register rtx temp;
3903 int dont_return_target = 0;
3905 if (TREE_CODE (exp) == COMPOUND_EXPR)
3907 /* Perform first part of compound expression, then assign from second
3908 part. */
3909 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3910 emit_queue ();
3911 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3913 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3915 /* For conditional expression, get safe form of the target. Then
3916 test the condition, doing the appropriate assignment on either
3917 side. This avoids the creation of unnecessary temporaries.
3918 For non-BLKmode, it is more efficient not to do this. */
3920 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3922 emit_queue ();
3923 target = protect_from_queue (target, 1);
3925 do_pending_stack_adjust ();
3926 NO_DEFER_POP;
3927 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3928 start_cleanup_deferral ();
3929 store_expr (TREE_OPERAND (exp, 1), target, 0);
3930 end_cleanup_deferral ();
3931 emit_queue ();
3932 emit_jump_insn (gen_jump (lab2));
3933 emit_barrier ();
3934 emit_label (lab1);
3935 start_cleanup_deferral ();
3936 store_expr (TREE_OPERAND (exp, 2), target, 0);
3937 end_cleanup_deferral ();
3938 emit_queue ();
3939 emit_label (lab2);
3940 OK_DEFER_POP;
3942 return want_value ? target : NULL_RTX;
3944 else if (queued_subexp_p (target))
3945 /* If target contains a postincrement, let's not risk
3946 using it as the place to generate the rhs. */
3948 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3950 /* Expand EXP into a new pseudo. */
3951 temp = gen_reg_rtx (GET_MODE (target));
3952 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3954 else
3955 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3957 /* If target is volatile, ANSI requires accessing the value
3958 *from* the target, if it is accessed. So make that happen.
3959 In no case return the target itself. */
3960 if (! MEM_VOLATILE_P (target) && want_value)
3961 dont_return_target = 1;
3963 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3964 && GET_MODE (target) != BLKmode)
3965 /* If target is in memory and caller wants value in a register instead,
3966 arrange that. Pass TARGET as target for expand_expr so that,
3967 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3968 We know expand_expr will not use the target in that case.
3969 Don't do this if TARGET is volatile because we are supposed
3970 to write it and then read it. */
3972 temp = expand_expr (exp, target, GET_MODE (target), 0);
3973 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3974 temp = copy_to_reg (temp);
3975 dont_return_target = 1;
3977 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3978 /* If this is an scalar in a register that is stored in a wider mode
3979 than the declared mode, compute the result into its declared mode
3980 and then convert to the wider mode. Our value is the computed
3981 expression. */
3983 /* If we don't want a value, we can do the conversion inside EXP,
3984 which will often result in some optimizations. Do the conversion
3985 in two steps: first change the signedness, if needed, then
3986 the extend. But don't do this if the type of EXP is a subtype
3987 of something else since then the conversion might involve
3988 more than just converting modes. */
3989 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3990 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3992 if (TREE_UNSIGNED (TREE_TYPE (exp))
3993 != SUBREG_PROMOTED_UNSIGNED_P (target))
3995 = convert
3996 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3997 TREE_TYPE (exp)),
3998 exp);
4000 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4001 SUBREG_PROMOTED_UNSIGNED_P (target)),
4002 exp);
4005 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4007 /* If TEMP is a volatile MEM and we want a result value, make
4008 the access now so it gets done only once. Likewise if
4009 it contains TARGET. */
4010 if (GET_CODE (temp) == MEM && want_value
4011 && (MEM_VOLATILE_P (temp)
4012 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4013 temp = copy_to_reg (temp);
4015 /* If TEMP is a VOIDmode constant, use convert_modes to make
4016 sure that we properly convert it. */
4017 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4018 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4019 TYPE_MODE (TREE_TYPE (exp)), temp,
4020 SUBREG_PROMOTED_UNSIGNED_P (target));
4022 convert_move (SUBREG_REG (target), temp,
4023 SUBREG_PROMOTED_UNSIGNED_P (target));
4025 /* If we promoted a constant, change the mode back down to match
4026 target. Otherwise, the caller might get confused by a result whose
4027 mode is larger than expected. */
4029 if (want_value && GET_MODE (temp) != GET_MODE (target)
4030 && GET_MODE (temp) != VOIDmode)
4032 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4033 SUBREG_PROMOTED_VAR_P (temp) = 1;
4034 SUBREG_PROMOTED_UNSIGNED_P (temp)
4035 = SUBREG_PROMOTED_UNSIGNED_P (target);
4038 return want_value ? temp : NULL_RTX;
4040 else
4042 temp = expand_expr (exp, target, GET_MODE (target), 0);
4043 /* Return TARGET if it's a specified hardware register.
4044 If TARGET is a volatile mem ref, either return TARGET
4045 or return a reg copied *from* TARGET; ANSI requires this.
4047 Otherwise, if TEMP is not TARGET, return TEMP
4048 if it is constant (for efficiency),
4049 or if we really want the correct value. */
4050 if (!(target && GET_CODE (target) == REG
4051 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4052 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4053 && ! rtx_equal_p (temp, target)
4054 && (CONSTANT_P (temp) || want_value))
4055 dont_return_target = 1;
4058 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4059 the same as that of TARGET, adjust the constant. This is needed, for
4060 example, in case it is a CONST_DOUBLE and we want only a word-sized
4061 value. */
4062 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4063 && TREE_CODE (exp) != ERROR_MARK
4064 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4065 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4066 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4068 if (current_function_check_memory_usage
4069 && GET_CODE (target) == MEM
4070 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4072 in_check_memory_usage = 1;
4073 if (GET_CODE (temp) == MEM)
4074 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4075 VOIDmode, 3, XEXP (target, 0), Pmode,
4076 XEXP (temp, 0), Pmode,
4077 expr_size (exp), TYPE_MODE (sizetype));
4078 else
4079 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4080 VOIDmode, 3, XEXP (target, 0), Pmode,
4081 expr_size (exp), TYPE_MODE (sizetype),
4082 GEN_INT (MEMORY_USE_WO),
4083 TYPE_MODE (integer_type_node));
4084 in_check_memory_usage = 0;
4087 /* If value was not generated in the target, store it there.
4088 Convert the value to TARGET's type first if nec. */
4089 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4090 one or both of them are volatile memory refs, we have to distinguish
4091 two cases:
4092 - expand_expr has used TARGET. In this case, we must not generate
4093 another copy. This can be detected by TARGET being equal according
4094 to == .
4095 - expand_expr has not used TARGET - that means that the source just
4096 happens to have the same RTX form. Since temp will have been created
4097 by expand_expr, it will compare unequal according to == .
4098 We must generate a copy in this case, to reach the correct number
4099 of volatile memory references. */
4101 if ((! rtx_equal_p (temp, target)
4102 || (temp != target && (side_effects_p (temp)
4103 || side_effects_p (target))))
4104 && TREE_CODE (exp) != ERROR_MARK)
4106 target = protect_from_queue (target, 1);
4107 if (GET_MODE (temp) != GET_MODE (target)
4108 && GET_MODE (temp) != VOIDmode)
4110 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4111 if (dont_return_target)
4113 /* In this case, we will return TEMP,
4114 so make sure it has the proper mode.
4115 But don't forget to store the value into TARGET. */
4116 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4117 emit_move_insn (target, temp);
4119 else
4120 convert_move (target, temp, unsignedp);
4123 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4125 /* Handle copying a string constant into an array.
4126 The string constant may be shorter than the array.
4127 So copy just the string's actual length, and clear the rest. */
4128 rtx size;
4129 rtx addr;
4131 /* Get the size of the data type of the string,
4132 which is actually the size of the target. */
4133 size = expr_size (exp);
4134 if (GET_CODE (size) == CONST_INT
4135 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4136 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4137 else
4139 /* Compute the size of the data to copy from the string. */
4140 tree copy_size
4141 = size_binop (MIN_EXPR,
4142 make_tree (sizetype, size),
4143 size_int (TREE_STRING_LENGTH (exp)));
4144 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4145 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4146 VOIDmode, 0);
4147 rtx label = 0;
4149 /* Copy that much. */
4150 emit_block_move (target, temp, copy_size_rtx,
4151 TYPE_ALIGN (TREE_TYPE (exp)));
4153 /* Figure out how much is left in TARGET that we have to clear.
4154 Do all calculations in ptr_mode. */
4156 addr = XEXP (target, 0);
4157 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4159 if (GET_CODE (copy_size_rtx) == CONST_INT)
4161 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4162 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4163 align = MIN (align,
4164 (unsigned int) (BITS_PER_UNIT
4165 * (INTVAL (copy_size_rtx)
4166 & - INTVAL (copy_size_rtx))));
4168 else
4170 addr = force_reg (ptr_mode, addr);
4171 addr = expand_binop (ptr_mode, add_optab, addr,
4172 copy_size_rtx, NULL_RTX, 0,
4173 OPTAB_LIB_WIDEN);
4175 size = expand_binop (ptr_mode, sub_optab, size,
4176 copy_size_rtx, NULL_RTX, 0,
4177 OPTAB_LIB_WIDEN);
4179 align = BITS_PER_UNIT;
4180 label = gen_label_rtx ();
4181 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4182 GET_MODE (size), 0, 0, label);
4184 align = MIN (align, expr_align (copy_size));
4186 if (size != const0_rtx)
4188 rtx dest = gen_rtx_MEM (BLKmode, addr);
4190 MEM_COPY_ATTRIBUTES (dest, target);
4192 /* Be sure we can write on ADDR. */
4193 in_check_memory_usage = 1;
4194 if (current_function_check_memory_usage)
4195 emit_library_call (chkr_check_addr_libfunc,
4196 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4197 addr, Pmode,
4198 size, TYPE_MODE (sizetype),
4199 GEN_INT (MEMORY_USE_WO),
4200 TYPE_MODE (integer_type_node));
4201 in_check_memory_usage = 0;
4202 clear_storage (dest, size, align);
4205 if (label)
4206 emit_label (label);
4209 /* Handle calls that return values in multiple non-contiguous locations.
4210 The Irix 6 ABI has examples of this. */
4211 else if (GET_CODE (target) == PARALLEL)
4212 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4213 TYPE_ALIGN (TREE_TYPE (exp)));
4214 else if (GET_MODE (temp) == BLKmode)
4215 emit_block_move (target, temp, expr_size (exp),
4216 TYPE_ALIGN (TREE_TYPE (exp)));
4217 else
4218 emit_move_insn (target, temp);
4221 /* If we don't want a value, return NULL_RTX. */
4222 if (! want_value)
4223 return NULL_RTX;
4225 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4226 ??? The latter test doesn't seem to make sense. */
4227 else if (dont_return_target && GET_CODE (temp) != MEM)
4228 return temp;
4230 /* Return TARGET itself if it is a hard register. */
4231 else if (want_value && GET_MODE (target) != BLKmode
4232 && ! (GET_CODE (target) == REG
4233 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4234 return copy_to_reg (target);
4236 else
4237 return target;
4240 /* Return 1 if EXP just contains zeros. */
4242 static int
4243 is_zeros_p (exp)
4244 tree exp;
4246 tree elt;
4248 switch (TREE_CODE (exp))
4250 case CONVERT_EXPR:
4251 case NOP_EXPR:
4252 case NON_LVALUE_EXPR:
4253 return is_zeros_p (TREE_OPERAND (exp, 0));
4255 case INTEGER_CST:
4256 return integer_zerop (exp);
4258 case COMPLEX_CST:
4259 return
4260 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4262 case REAL_CST:
4263 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4265 case CONSTRUCTOR:
4266 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4267 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4268 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4269 if (! is_zeros_p (TREE_VALUE (elt)))
4270 return 0;
4272 return 1;
4274 default:
4275 return 0;
4279 /* Return 1 if EXP contains mostly (3/4) zeros. */
4281 static int
4282 mostly_zeros_p (exp)
4283 tree exp;
4285 if (TREE_CODE (exp) == CONSTRUCTOR)
4287 int elts = 0, zeros = 0;
4288 tree elt = CONSTRUCTOR_ELTS (exp);
4289 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4291 /* If there are no ranges of true bits, it is all zero. */
4292 return elt == NULL_TREE;
4294 for (; elt; elt = TREE_CHAIN (elt))
4296 /* We do not handle the case where the index is a RANGE_EXPR,
4297 so the statistic will be somewhat inaccurate.
4298 We do make a more accurate count in store_constructor itself,
4299 so since this function is only used for nested array elements,
4300 this should be close enough. */
4301 if (mostly_zeros_p (TREE_VALUE (elt)))
4302 zeros++;
4303 elts++;
4306 return 4 * zeros >= 3 * elts;
4309 return is_zeros_p (exp);
4312 /* Helper function for store_constructor.
4313 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4314 TYPE is the type of the CONSTRUCTOR, not the element type.
4315 ALIGN and CLEARED are as for store_constructor.
4316 ALIAS_SET is the alias set to use for any stores.
4318 This provides a recursive shortcut back to store_constructor when it isn't
4319 necessary to go through store_field. This is so that we can pass through
4320 the cleared field to let store_constructor know that we may not have to
4321 clear a substructure if the outer structure has already been cleared. */
4323 static void
4324 store_constructor_field (target, bitsize, bitpos,
4325 mode, exp, type, align, cleared, alias_set)
4326 rtx target;
4327 unsigned HOST_WIDE_INT bitsize;
4328 HOST_WIDE_INT bitpos;
4329 enum machine_mode mode;
4330 tree exp, type;
4331 unsigned int align;
4332 int cleared;
4333 int alias_set;
4335 if (TREE_CODE (exp) == CONSTRUCTOR
4336 && bitpos % BITS_PER_UNIT == 0
4337 /* If we have a non-zero bitpos for a register target, then we just
4338 let store_field do the bitfield handling. This is unlikely to
4339 generate unnecessary clear instructions anyways. */
4340 && (bitpos == 0 || GET_CODE (target) == MEM))
4342 if (bitpos != 0)
4343 target
4344 = change_address (target,
4345 GET_MODE (target) == BLKmode
4346 || 0 != (bitpos
4347 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4348 ? BLKmode : VOIDmode,
4349 plus_constant (XEXP (target, 0),
4350 bitpos / BITS_PER_UNIT));
4353 /* Show the alignment may no longer be what it was and update the alias
4354 set, if required. */
4355 if (bitpos != 0)
4356 align = MIN (align, (unsigned int) bitpos & - bitpos);
4357 if (GET_CODE (target) == MEM)
4358 MEM_ALIAS_SET (target) = alias_set;
4360 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4362 else
4363 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4364 int_size_in_bytes (type), alias_set);
4367 /* Store the value of constructor EXP into the rtx TARGET.
4368 TARGET is either a REG or a MEM.
4369 ALIGN is the maximum known alignment for TARGET.
4370 CLEARED is true if TARGET is known to have been zero'd.
4371 SIZE is the number of bytes of TARGET we are allowed to modify: this
4372 may not be the same as the size of EXP if we are assigning to a field
4373 which has been packed to exclude padding bits. */
4375 static void
4376 store_constructor (exp, target, align, cleared, size)
4377 tree exp;
4378 rtx target;
4379 unsigned int align;
4380 int cleared;
4381 HOST_WIDE_INT size;
4383 tree type = TREE_TYPE (exp);
4384 #ifdef WORD_REGISTER_OPERATIONS
4385 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4386 #endif
4388 /* We know our target cannot conflict, since safe_from_p has been called. */
4389 #if 0
4390 /* Don't try copying piece by piece into a hard register
4391 since that is vulnerable to being clobbered by EXP.
4392 Instead, construct in a pseudo register and then copy it all. */
4393 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4395 rtx temp = gen_reg_rtx (GET_MODE (target));
4396 store_constructor (exp, temp, align, cleared, size);
4397 emit_move_insn (target, temp);
4398 return;
4400 #endif
4402 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4403 || TREE_CODE (type) == QUAL_UNION_TYPE)
4405 register tree elt;
4407 /* Inform later passes that the whole union value is dead. */
4408 if ((TREE_CODE (type) == UNION_TYPE
4409 || TREE_CODE (type) == QUAL_UNION_TYPE)
4410 && ! cleared)
4412 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4414 /* If the constructor is empty, clear the union. */
4415 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4416 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4419 /* If we are building a static constructor into a register,
4420 set the initial value as zero so we can fold the value into
4421 a constant. But if more than one register is involved,
4422 this probably loses. */
4423 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4424 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4426 if (! cleared)
4427 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4429 cleared = 1;
4432 /* If the constructor has fewer fields than the structure
4433 or if we are initializing the structure to mostly zeros,
4434 clear the whole structure first. Don't do this if TARGET is a
4435 register whose mode size isn't equal to SIZE since clear_storage
4436 can't handle this case. */
4437 else if (size > 0
4438 && ((list_length (CONSTRUCTOR_ELTS (exp))
4439 != fields_length (type))
4440 || mostly_zeros_p (exp))
4441 && (GET_CODE (target) != REG
4442 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4444 if (! cleared)
4445 clear_storage (target, GEN_INT (size), align);
4447 cleared = 1;
4449 else if (! cleared)
4450 /* Inform later passes that the old value is dead. */
4451 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4453 /* Store each element of the constructor into
4454 the corresponding field of TARGET. */
4456 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4458 register tree field = TREE_PURPOSE (elt);
4459 #ifdef WORD_REGISTER_OPERATIONS
4460 tree value = TREE_VALUE (elt);
4461 #endif
4462 register enum machine_mode mode;
4463 HOST_WIDE_INT bitsize;
4464 HOST_WIDE_INT bitpos = 0;
4465 int unsignedp;
4466 tree offset;
4467 rtx to_rtx = target;
4469 /* Just ignore missing fields.
4470 We cleared the whole structure, above,
4471 if any fields are missing. */
4472 if (field == 0)
4473 continue;
4475 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4476 continue;
4478 if (host_integerp (DECL_SIZE (field), 1))
4479 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4480 else
4481 bitsize = -1;
4483 unsignedp = TREE_UNSIGNED (field);
4484 mode = DECL_MODE (field);
4485 if (DECL_BIT_FIELD (field))
4486 mode = VOIDmode;
4488 offset = DECL_FIELD_OFFSET (field);
4489 if (host_integerp (offset, 0)
4490 && host_integerp (bit_position (field), 0))
4492 bitpos = int_bit_position (field);
4493 offset = 0;
4495 else
4496 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4498 if (offset)
4500 rtx offset_rtx;
4502 if (contains_placeholder_p (offset))
4503 offset = build (WITH_RECORD_EXPR, sizetype,
4504 offset, make_tree (TREE_TYPE (exp), target));
4506 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4507 if (GET_CODE (to_rtx) != MEM)
4508 abort ();
4510 if (GET_MODE (offset_rtx) != ptr_mode)
4512 #ifdef POINTERS_EXTEND_UNSIGNED
4513 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4514 #else
4515 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4516 #endif
4519 to_rtx
4520 = change_address (to_rtx, VOIDmode,
4521 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4522 force_reg (ptr_mode,
4523 offset_rtx)));
4524 align = DECL_OFFSET_ALIGN (field);
4527 if (TREE_READONLY (field))
4529 if (GET_CODE (to_rtx) == MEM)
4530 to_rtx = copy_rtx (to_rtx);
4532 RTX_UNCHANGING_P (to_rtx) = 1;
4535 #ifdef WORD_REGISTER_OPERATIONS
4536 /* If this initializes a field that is smaller than a word, at the
4537 start of a word, try to widen it to a full word.
4538 This special case allows us to output C++ member function
4539 initializations in a form that the optimizers can understand. */
4540 if (GET_CODE (target) == REG
4541 && bitsize < BITS_PER_WORD
4542 && bitpos % BITS_PER_WORD == 0
4543 && GET_MODE_CLASS (mode) == MODE_INT
4544 && TREE_CODE (value) == INTEGER_CST
4545 && exp_size >= 0
4546 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4548 tree type = TREE_TYPE (value);
4549 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4551 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4552 value = convert (type, value);
4554 if (BYTES_BIG_ENDIAN)
4555 value
4556 = fold (build (LSHIFT_EXPR, type, value,
4557 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4558 bitsize = BITS_PER_WORD;
4559 mode = word_mode;
4561 #endif
4562 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4563 TREE_VALUE (elt), type, align, cleared,
4564 (DECL_NONADDRESSABLE_P (field)
4565 && GET_CODE (to_rtx) == MEM)
4566 ? MEM_ALIAS_SET (to_rtx)
4567 : get_alias_set (TREE_TYPE (field)));
4570 else if (TREE_CODE (type) == ARRAY_TYPE)
4572 register tree elt;
4573 register int i;
4574 int need_to_clear;
4575 tree domain = TYPE_DOMAIN (type);
4576 tree elttype = TREE_TYPE (type);
4577 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4578 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4579 HOST_WIDE_INT minelt;
4580 HOST_WIDE_INT maxelt;
4582 /* If we have constant bounds for the range of the type, get them. */
4583 if (const_bounds_p)
4585 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4586 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4589 /* If the constructor has fewer elements than the array,
4590 clear the whole array first. Similarly if this is
4591 static constructor of a non-BLKmode object. */
4592 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4593 need_to_clear = 1;
4594 else
4596 HOST_WIDE_INT count = 0, zero_count = 0;
4597 need_to_clear = ! const_bounds_p;
4599 /* This loop is a more accurate version of the loop in
4600 mostly_zeros_p (it handles RANGE_EXPR in an index).
4601 It is also needed to check for missing elements. */
4602 for (elt = CONSTRUCTOR_ELTS (exp);
4603 elt != NULL_TREE && ! need_to_clear;
4604 elt = TREE_CHAIN (elt))
4606 tree index = TREE_PURPOSE (elt);
4607 HOST_WIDE_INT this_node_count;
4609 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4611 tree lo_index = TREE_OPERAND (index, 0);
4612 tree hi_index = TREE_OPERAND (index, 1);
4614 if (! host_integerp (lo_index, 1)
4615 || ! host_integerp (hi_index, 1))
4617 need_to_clear = 1;
4618 break;
4621 this_node_count = (tree_low_cst (hi_index, 1)
4622 - tree_low_cst (lo_index, 1) + 1);
4624 else
4625 this_node_count = 1;
4627 count += this_node_count;
4628 if (mostly_zeros_p (TREE_VALUE (elt)))
4629 zero_count += this_node_count;
4632 /* Clear the entire array first if there are any missing elements,
4633 or if the incidence of zero elements is >= 75%. */
4634 if (! need_to_clear
4635 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4636 need_to_clear = 1;
4639 if (need_to_clear && size > 0)
4641 if (! cleared)
4642 clear_storage (target, GEN_INT (size), align);
4643 cleared = 1;
4645 else
4646 /* Inform later passes that the old value is dead. */
4647 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4649 /* Store each element of the constructor into
4650 the corresponding element of TARGET, determined
4651 by counting the elements. */
4652 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4653 elt;
4654 elt = TREE_CHAIN (elt), i++)
4656 register enum machine_mode mode;
4657 HOST_WIDE_INT bitsize;
4658 HOST_WIDE_INT bitpos;
4659 int unsignedp;
4660 tree value = TREE_VALUE (elt);
4661 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4662 tree index = TREE_PURPOSE (elt);
4663 rtx xtarget = target;
4665 if (cleared && is_zeros_p (value))
4666 continue;
4668 unsignedp = TREE_UNSIGNED (elttype);
4669 mode = TYPE_MODE (elttype);
4670 if (mode == BLKmode)
4671 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4672 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4673 : -1);
4674 else
4675 bitsize = GET_MODE_BITSIZE (mode);
4677 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4679 tree lo_index = TREE_OPERAND (index, 0);
4680 tree hi_index = TREE_OPERAND (index, 1);
4681 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4682 struct nesting *loop;
4683 HOST_WIDE_INT lo, hi, count;
4684 tree position;
4686 /* If the range is constant and "small", unroll the loop. */
4687 if (const_bounds_p
4688 && host_integerp (lo_index, 0)
4689 && host_integerp (hi_index, 0)
4690 && (lo = tree_low_cst (lo_index, 0),
4691 hi = tree_low_cst (hi_index, 0),
4692 count = hi - lo + 1,
4693 (GET_CODE (target) != MEM
4694 || count <= 2
4695 || (host_integerp (TYPE_SIZE (elttype), 1)
4696 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4697 <= 40 * 8)))))
4699 lo -= minelt; hi -= minelt;
4700 for (; lo <= hi; lo++)
4702 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4703 store_constructor_field
4704 (target, bitsize, bitpos, mode, value, type, align,
4705 cleared,
4706 TYPE_NONALIASED_COMPONENT (type)
4707 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4710 else
4712 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4713 loop_top = gen_label_rtx ();
4714 loop_end = gen_label_rtx ();
4716 unsignedp = TREE_UNSIGNED (domain);
4718 index = build_decl (VAR_DECL, NULL_TREE, domain);
4720 index_r
4721 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4722 &unsignedp, 0));
4723 SET_DECL_RTL (index, index_r);
4724 if (TREE_CODE (value) == SAVE_EXPR
4725 && SAVE_EXPR_RTL (value) == 0)
4727 /* Make sure value gets expanded once before the
4728 loop. */
4729 expand_expr (value, const0_rtx, VOIDmode, 0);
4730 emit_queue ();
4732 store_expr (lo_index, index_r, 0);
4733 loop = expand_start_loop (0);
4735 /* Assign value to element index. */
4736 position
4737 = convert (ssizetype,
4738 fold (build (MINUS_EXPR, TREE_TYPE (index),
4739 index, TYPE_MIN_VALUE (domain))));
4740 position = size_binop (MULT_EXPR, position,
4741 convert (ssizetype,
4742 TYPE_SIZE_UNIT (elttype)));
4744 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4745 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4746 xtarget = change_address (target, mode, addr);
4747 if (TREE_CODE (value) == CONSTRUCTOR)
4748 store_constructor (value, xtarget, align, cleared,
4749 bitsize / BITS_PER_UNIT);
4750 else
4751 store_expr (value, xtarget, 0);
4753 expand_exit_loop_if_false (loop,
4754 build (LT_EXPR, integer_type_node,
4755 index, hi_index));
4757 expand_increment (build (PREINCREMENT_EXPR,
4758 TREE_TYPE (index),
4759 index, integer_one_node), 0, 0);
4760 expand_end_loop ();
4761 emit_label (loop_end);
4764 else if ((index != 0 && ! host_integerp (index, 0))
4765 || ! host_integerp (TYPE_SIZE (elttype), 1))
4767 rtx pos_rtx, addr;
4768 tree position;
4770 if (index == 0)
4771 index = ssize_int (1);
4773 if (minelt)
4774 index = convert (ssizetype,
4775 fold (build (MINUS_EXPR, index,
4776 TYPE_MIN_VALUE (domain))));
4778 position = size_binop (MULT_EXPR, index,
4779 convert (ssizetype,
4780 TYPE_SIZE_UNIT (elttype)));
4781 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4782 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4783 xtarget = change_address (target, mode, addr);
4784 store_expr (value, xtarget, 0);
4786 else
4788 if (index != 0)
4789 bitpos = ((tree_low_cst (index, 0) - minelt)
4790 * tree_low_cst (TYPE_SIZE (elttype), 1));
4791 else
4792 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4794 store_constructor_field (target, bitsize, bitpos, mode, value,
4795 type, align, cleared,
4796 TYPE_NONALIASED_COMPONENT (type)
4797 && GET_CODE (target) == MEM
4798 ? MEM_ALIAS_SET (target) :
4799 get_alias_set (elttype));
4805 /* Set constructor assignments. */
4806 else if (TREE_CODE (type) == SET_TYPE)
4808 tree elt = CONSTRUCTOR_ELTS (exp);
4809 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4810 tree domain = TYPE_DOMAIN (type);
4811 tree domain_min, domain_max, bitlength;
4813 /* The default implementation strategy is to extract the constant
4814 parts of the constructor, use that to initialize the target,
4815 and then "or" in whatever non-constant ranges we need in addition.
4817 If a large set is all zero or all ones, it is
4818 probably better to set it using memset (if available) or bzero.
4819 Also, if a large set has just a single range, it may also be
4820 better to first clear all the first clear the set (using
4821 bzero/memset), and set the bits we want. */
4823 /* Check for all zeros. */
4824 if (elt == NULL_TREE && size > 0)
4826 if (!cleared)
4827 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4828 return;
4831 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4832 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4833 bitlength = size_binop (PLUS_EXPR,
4834 size_diffop (domain_max, domain_min),
4835 ssize_int (1));
4837 nbits = tree_low_cst (bitlength, 1);
4839 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4840 are "complicated" (more than one range), initialize (the
4841 constant parts) by copying from a constant. */
4842 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4843 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4845 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4846 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4847 char *bit_buffer = (char *) alloca (nbits);
4848 HOST_WIDE_INT word = 0;
4849 unsigned int bit_pos = 0;
4850 unsigned int ibit = 0;
4851 unsigned int offset = 0; /* In bytes from beginning of set. */
4853 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4854 for (;;)
4856 if (bit_buffer[ibit])
4858 if (BYTES_BIG_ENDIAN)
4859 word |= (1 << (set_word_size - 1 - bit_pos));
4860 else
4861 word |= 1 << bit_pos;
4864 bit_pos++; ibit++;
4865 if (bit_pos >= set_word_size || ibit == nbits)
4867 if (word != 0 || ! cleared)
4869 rtx datum = GEN_INT (word);
4870 rtx to_rtx;
4872 /* The assumption here is that it is safe to use
4873 XEXP if the set is multi-word, but not if
4874 it's single-word. */
4875 if (GET_CODE (target) == MEM)
4877 to_rtx = plus_constant (XEXP (target, 0), offset);
4878 to_rtx = change_address (target, mode, to_rtx);
4880 else if (offset == 0)
4881 to_rtx = target;
4882 else
4883 abort ();
4884 emit_move_insn (to_rtx, datum);
4887 if (ibit == nbits)
4888 break;
4889 word = 0;
4890 bit_pos = 0;
4891 offset += set_word_size / BITS_PER_UNIT;
4895 else if (!cleared)
4896 /* Don't bother clearing storage if the set is all ones. */
4897 if (TREE_CHAIN (elt) != NULL_TREE
4898 || (TREE_PURPOSE (elt) == NULL_TREE
4899 ? nbits != 1
4900 : ( ! host_integerp (TREE_VALUE (elt), 0)
4901 || ! host_integerp (TREE_PURPOSE (elt), 0)
4902 || (tree_low_cst (TREE_VALUE (elt), 0)
4903 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4904 != (HOST_WIDE_INT) nbits))))
4905 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4907 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4909 /* Start of range of element or NULL. */
4910 tree startbit = TREE_PURPOSE (elt);
4911 /* End of range of element, or element value. */
4912 tree endbit = TREE_VALUE (elt);
4913 #ifdef TARGET_MEM_FUNCTIONS
4914 HOST_WIDE_INT startb, endb;
4915 #endif
4916 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4918 bitlength_rtx = expand_expr (bitlength,
4919 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4921 /* Handle non-range tuple element like [ expr ]. */
4922 if (startbit == NULL_TREE)
4924 startbit = save_expr (endbit);
4925 endbit = startbit;
4928 startbit = convert (sizetype, startbit);
4929 endbit = convert (sizetype, endbit);
4930 if (! integer_zerop (domain_min))
4932 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4933 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4935 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4936 EXPAND_CONST_ADDRESS);
4937 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4938 EXPAND_CONST_ADDRESS);
4940 if (REG_P (target))
4942 targetx
4943 = assign_temp
4944 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4945 TYPE_QUAL_CONST)),
4946 0, 1, 1);
4947 emit_move_insn (targetx, target);
4950 else if (GET_CODE (target) == MEM)
4951 targetx = target;
4952 else
4953 abort ();
4955 #ifdef TARGET_MEM_FUNCTIONS
4956 /* Optimization: If startbit and endbit are
4957 constants divisible by BITS_PER_UNIT,
4958 call memset instead. */
4959 if (TREE_CODE (startbit) == INTEGER_CST
4960 && TREE_CODE (endbit) == INTEGER_CST
4961 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4962 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4964 emit_library_call (memset_libfunc, LCT_NORMAL,
4965 VOIDmode, 3,
4966 plus_constant (XEXP (targetx, 0),
4967 startb / BITS_PER_UNIT),
4968 Pmode,
4969 constm1_rtx, TYPE_MODE (integer_type_node),
4970 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4971 TYPE_MODE (sizetype));
4973 else
4974 #endif
4975 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4976 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4977 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4978 startbit_rtx, TYPE_MODE (sizetype),
4979 endbit_rtx, TYPE_MODE (sizetype));
4981 if (REG_P (target))
4982 emit_move_insn (target, targetx);
4986 else
4987 abort ();
4990 /* Store the value of EXP (an expression tree)
4991 into a subfield of TARGET which has mode MODE and occupies
4992 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4993 If MODE is VOIDmode, it means that we are storing into a bit-field.
4995 If VALUE_MODE is VOIDmode, return nothing in particular.
4996 UNSIGNEDP is not used in this case.
4998 Otherwise, return an rtx for the value stored. This rtx
4999 has mode VALUE_MODE if that is convenient to do.
5000 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5002 ALIGN is the alignment that TARGET is known to have.
5003 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5005 ALIAS_SET is the alias set for the destination. This value will
5006 (in general) be different from that for TARGET, since TARGET is a
5007 reference to the containing structure. */
5009 static rtx
5010 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5011 unsignedp, align, total_size, alias_set)
5012 rtx target;
5013 HOST_WIDE_INT bitsize;
5014 HOST_WIDE_INT bitpos;
5015 enum machine_mode mode;
5016 tree exp;
5017 enum machine_mode value_mode;
5018 int unsignedp;
5019 unsigned int align;
5020 HOST_WIDE_INT total_size;
5021 int alias_set;
5023 HOST_WIDE_INT width_mask = 0;
5025 if (TREE_CODE (exp) == ERROR_MARK)
5026 return const0_rtx;
5028 /* If we have nothing to store, do nothing unless the expression has
5029 side-effects. */
5030 if (bitsize == 0)
5031 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5033 if (bitsize < HOST_BITS_PER_WIDE_INT)
5034 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5036 /* If we are storing into an unaligned field of an aligned union that is
5037 in a register, we may have the mode of TARGET being an integer mode but
5038 MODE == BLKmode. In that case, get an aligned object whose size and
5039 alignment are the same as TARGET and store TARGET into it (we can avoid
5040 the store if the field being stored is the entire width of TARGET). Then
5041 call ourselves recursively to store the field into a BLKmode version of
5042 that object. Finally, load from the object into TARGET. This is not
5043 very efficient in general, but should only be slightly more expensive
5044 than the otherwise-required unaligned accesses. Perhaps this can be
5045 cleaned up later. */
5047 if (mode == BLKmode
5048 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5050 rtx object
5051 = assign_temp
5052 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5053 TYPE_QUAL_CONST),
5054 0, 1, 1);
5055 rtx blk_object = copy_rtx (object);
5057 PUT_MODE (blk_object, BLKmode);
5059 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5060 emit_move_insn (object, target);
5062 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5063 align, total_size, alias_set);
5065 /* Even though we aren't returning target, we need to
5066 give it the updated value. */
5067 emit_move_insn (target, object);
5069 return blk_object;
5072 if (GET_CODE (target) == CONCAT)
5074 /* We're storing into a struct containing a single __complex. */
5076 if (bitpos != 0)
5077 abort ();
5078 return store_expr (exp, target, 0);
5081 /* If the structure is in a register or if the component
5082 is a bit field, we cannot use addressing to access it.
5083 Use bit-field techniques or SUBREG to store in it. */
5085 if (mode == VOIDmode
5086 || (mode != BLKmode && ! direct_store[(int) mode]
5087 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5088 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5089 || GET_CODE (target) == REG
5090 || GET_CODE (target) == SUBREG
5091 /* If the field isn't aligned enough to store as an ordinary memref,
5092 store it as a bit field. */
5093 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5094 && (align < GET_MODE_ALIGNMENT (mode)
5095 || bitpos % GET_MODE_ALIGNMENT (mode)))
5096 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5097 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5098 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5099 /* If the RHS and field are a constant size and the size of the
5100 RHS isn't the same size as the bitfield, we must use bitfield
5101 operations. */
5102 || (bitsize >= 0
5103 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5104 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5106 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5108 /* If BITSIZE is narrower than the size of the type of EXP
5109 we will be narrowing TEMP. Normally, what's wanted are the
5110 low-order bits. However, if EXP's type is a record and this is
5111 big-endian machine, we want the upper BITSIZE bits. */
5112 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5113 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5114 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5115 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5116 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5117 - bitsize),
5118 temp, 1);
5120 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5121 MODE. */
5122 if (mode != VOIDmode && mode != BLKmode
5123 && mode != TYPE_MODE (TREE_TYPE (exp)))
5124 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5126 /* If the modes of TARGET and TEMP are both BLKmode, both
5127 must be in memory and BITPOS must be aligned on a byte
5128 boundary. If so, we simply do a block copy. */
5129 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5131 unsigned int exp_align = expr_align (exp);
5133 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5134 || bitpos % BITS_PER_UNIT != 0)
5135 abort ();
5137 target = change_address (target, VOIDmode,
5138 plus_constant (XEXP (target, 0),
5139 bitpos / BITS_PER_UNIT));
5141 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5142 align = MIN (exp_align, align);
5144 /* Find an alignment that is consistent with the bit position. */
5145 while ((bitpos % align) != 0)
5146 align >>= 1;
5148 emit_block_move (target, temp,
5149 bitsize == -1 ? expr_size (exp)
5150 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5151 / BITS_PER_UNIT),
5152 align);
5154 return value_mode == VOIDmode ? const0_rtx : target;
5157 /* Store the value in the bitfield. */
5158 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5159 if (value_mode != VOIDmode)
5161 /* The caller wants an rtx for the value. */
5162 /* If possible, avoid refetching from the bitfield itself. */
5163 if (width_mask != 0
5164 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5166 tree count;
5167 enum machine_mode tmode;
5169 if (unsignedp)
5170 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5171 tmode = GET_MODE (temp);
5172 if (tmode == VOIDmode)
5173 tmode = value_mode;
5174 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5175 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5176 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5178 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5179 NULL_RTX, value_mode, 0, align,
5180 total_size);
5182 return const0_rtx;
5184 else
5186 rtx addr = XEXP (target, 0);
5187 rtx to_rtx;
5189 /* If a value is wanted, it must be the lhs;
5190 so make the address stable for multiple use. */
5192 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5193 && ! CONSTANT_ADDRESS_P (addr)
5194 /* A frame-pointer reference is already stable. */
5195 && ! (GET_CODE (addr) == PLUS
5196 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5197 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5198 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5199 addr = copy_to_reg (addr);
5201 /* Now build a reference to just the desired component. */
5203 to_rtx = copy_rtx (change_address (target, mode,
5204 plus_constant (addr,
5205 (bitpos
5206 / BITS_PER_UNIT))));
5207 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5208 /* If the address of the structure varies, then it might be on
5209 the stack. And, stack slots may be shared across scopes.
5210 So, two different structures, of different types, can end up
5211 at the same location. We will give the structures alias set
5212 zero; here we must be careful not to give non-zero alias sets
5213 to their fields. */
5214 if (!rtx_varies_p (addr, /*for_alias=*/0))
5215 MEM_ALIAS_SET (to_rtx) = alias_set;
5216 else
5217 MEM_ALIAS_SET (to_rtx) = 0;
5219 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5223 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5224 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5225 ARRAY_REFs and find the ultimate containing object, which we return.
5227 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5228 bit position, and *PUNSIGNEDP to the signedness of the field.
5229 If the position of the field is variable, we store a tree
5230 giving the variable offset (in units) in *POFFSET.
5231 This offset is in addition to the bit position.
5232 If the position is not variable, we store 0 in *POFFSET.
5233 We set *PALIGNMENT to the alignment of the address that will be
5234 computed. This is the alignment of the thing we return if *POFFSET
5235 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
5237 If any of the extraction expressions is volatile,
5238 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5240 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5241 is a mode that can be used to access the field. In that case, *PBITSIZE
5242 is redundant.
5244 If the field describes a variable-sized object, *PMODE is set to
5245 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5246 this case, but the address of the object can be found. */
5248 tree
5249 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5250 punsignedp, pvolatilep, palignment)
5251 tree exp;
5252 HOST_WIDE_INT *pbitsize;
5253 HOST_WIDE_INT *pbitpos;
5254 tree *poffset;
5255 enum machine_mode *pmode;
5256 int *punsignedp;
5257 int *pvolatilep;
5258 unsigned int *palignment;
5260 tree size_tree = 0;
5261 enum machine_mode mode = VOIDmode;
5262 tree offset = size_zero_node;
5263 tree bit_offset = bitsize_zero_node;
5264 unsigned int alignment = BIGGEST_ALIGNMENT;
5265 tree tem;
5267 /* First get the mode, signedness, and size. We do this from just the
5268 outermost expression. */
5269 if (TREE_CODE (exp) == COMPONENT_REF)
5271 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5272 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5273 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5275 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5277 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5279 size_tree = TREE_OPERAND (exp, 1);
5280 *punsignedp = TREE_UNSIGNED (exp);
5282 else
5284 mode = TYPE_MODE (TREE_TYPE (exp));
5285 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5287 if (mode == BLKmode)
5288 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5289 else
5290 *pbitsize = GET_MODE_BITSIZE (mode);
5293 if (size_tree != 0)
5295 if (! host_integerp (size_tree, 1))
5296 mode = BLKmode, *pbitsize = -1;
5297 else
5298 *pbitsize = tree_low_cst (size_tree, 1);
5301 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5302 and find the ultimate containing object. */
5303 while (1)
5305 if (TREE_CODE (exp) == BIT_FIELD_REF)
5306 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5307 else if (TREE_CODE (exp) == COMPONENT_REF)
5309 tree field = TREE_OPERAND (exp, 1);
5310 tree this_offset = DECL_FIELD_OFFSET (field);
5312 /* If this field hasn't been filled in yet, don't go
5313 past it. This should only happen when folding expressions
5314 made during type construction. */
5315 if (this_offset == 0)
5316 break;
5317 else if (! TREE_CONSTANT (this_offset)
5318 && contains_placeholder_p (this_offset))
5319 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5321 offset = size_binop (PLUS_EXPR, offset, this_offset);
5322 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5323 DECL_FIELD_BIT_OFFSET (field));
5325 if (! host_integerp (offset, 0))
5326 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5329 else if (TREE_CODE (exp) == ARRAY_REF)
5331 tree index = TREE_OPERAND (exp, 1);
5332 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5333 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5334 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5336 /* We assume all arrays have sizes that are a multiple of a byte.
5337 First subtract the lower bound, if any, in the type of the
5338 index, then convert to sizetype and multiply by the size of the
5339 array element. */
5340 if (low_bound != 0 && ! integer_zerop (low_bound))
5341 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5342 index, low_bound));
5344 /* If the index has a self-referential type, pass it to a
5345 WITH_RECORD_EXPR; if the component size is, pass our
5346 component to one. */
5347 if (! TREE_CONSTANT (index)
5348 && contains_placeholder_p (index))
5349 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5350 if (! TREE_CONSTANT (unit_size)
5351 && contains_placeholder_p (unit_size))
5352 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5353 TREE_OPERAND (exp, 0));
5355 offset = size_binop (PLUS_EXPR, offset,
5356 size_binop (MULT_EXPR,
5357 convert (sizetype, index),
5358 unit_size));
5361 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5362 && ! ((TREE_CODE (exp) == NOP_EXPR
5363 || TREE_CODE (exp) == CONVERT_EXPR)
5364 && (TYPE_MODE (TREE_TYPE (exp))
5365 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5366 break;
5368 /* If any reference in the chain is volatile, the effect is volatile. */
5369 if (TREE_THIS_VOLATILE (exp))
5370 *pvolatilep = 1;
5372 /* If the offset is non-constant already, then we can't assume any
5373 alignment more than the alignment here. */
5374 if (! TREE_CONSTANT (offset))
5375 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5377 exp = TREE_OPERAND (exp, 0);
5380 if (DECL_P (exp))
5381 alignment = MIN (alignment, DECL_ALIGN (exp));
5382 else if (TREE_TYPE (exp) != 0)
5383 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5385 /* If OFFSET is constant, see if we can return the whole thing as a
5386 constant bit position. Otherwise, split it up. */
5387 if (host_integerp (offset, 0)
5388 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5389 bitsize_unit_node))
5390 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5391 && host_integerp (tem, 0))
5392 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5393 else
5394 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5396 *pmode = mode;
5397 *palignment = alignment;
5398 return exp;
5401 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5403 static enum memory_use_mode
5404 get_memory_usage_from_modifier (modifier)
5405 enum expand_modifier modifier;
5407 switch (modifier)
5409 case EXPAND_NORMAL:
5410 case EXPAND_SUM:
5411 return MEMORY_USE_RO;
5412 break;
5413 case EXPAND_MEMORY_USE_WO:
5414 return MEMORY_USE_WO;
5415 break;
5416 case EXPAND_MEMORY_USE_RW:
5417 return MEMORY_USE_RW;
5418 break;
5419 case EXPAND_MEMORY_USE_DONT:
5420 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5421 MEMORY_USE_DONT, because they are modifiers to a call of
5422 expand_expr in the ADDR_EXPR case of expand_expr. */
5423 case EXPAND_CONST_ADDRESS:
5424 case EXPAND_INITIALIZER:
5425 return MEMORY_USE_DONT;
5426 case EXPAND_MEMORY_USE_BAD:
5427 default:
5428 abort ();
5432 /* Given an rtx VALUE that may contain additions and multiplications, return
5433 an equivalent value that just refers to a register, memory, or constant.
5434 This is done by generating instructions to perform the arithmetic and
5435 returning a pseudo-register containing the value.
5437 The returned value may be a REG, SUBREG, MEM or constant. */
5440 force_operand (value, target)
5441 rtx value, target;
5443 register optab binoptab = 0;
5444 /* Use a temporary to force order of execution of calls to
5445 `force_operand'. */
5446 rtx tmp;
5447 register rtx op2;
5448 /* Use subtarget as the target for operand 0 of a binary operation. */
5449 register rtx subtarget = get_subtarget (target);
5451 /* Check for a PIC address load. */
5452 if (flag_pic
5453 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5454 && XEXP (value, 0) == pic_offset_table_rtx
5455 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5456 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5457 || GET_CODE (XEXP (value, 1)) == CONST))
5459 if (!subtarget)
5460 subtarget = gen_reg_rtx (GET_MODE (value));
5461 emit_move_insn (subtarget, value);
5462 return subtarget;
5465 if (GET_CODE (value) == PLUS)
5466 binoptab = add_optab;
5467 else if (GET_CODE (value) == MINUS)
5468 binoptab = sub_optab;
5469 else if (GET_CODE (value) == MULT)
5471 op2 = XEXP (value, 1);
5472 if (!CONSTANT_P (op2)
5473 && !(GET_CODE (op2) == REG && op2 != subtarget))
5474 subtarget = 0;
5475 tmp = force_operand (XEXP (value, 0), subtarget);
5476 return expand_mult (GET_MODE (value), tmp,
5477 force_operand (op2, NULL_RTX),
5478 target, 1);
5481 if (binoptab)
5483 op2 = XEXP (value, 1);
5484 if (!CONSTANT_P (op2)
5485 && !(GET_CODE (op2) == REG && op2 != subtarget))
5486 subtarget = 0;
5487 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5489 binoptab = add_optab;
5490 op2 = negate_rtx (GET_MODE (value), op2);
5493 /* Check for an addition with OP2 a constant integer and our first
5494 operand a PLUS of a virtual register and something else. In that
5495 case, we want to emit the sum of the virtual register and the
5496 constant first and then add the other value. This allows virtual
5497 register instantiation to simply modify the constant rather than
5498 creating another one around this addition. */
5499 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5500 && GET_CODE (XEXP (value, 0)) == PLUS
5501 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5502 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5503 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5505 rtx temp = expand_binop (GET_MODE (value), binoptab,
5506 XEXP (XEXP (value, 0), 0), op2,
5507 subtarget, 0, OPTAB_LIB_WIDEN);
5508 return expand_binop (GET_MODE (value), binoptab, temp,
5509 force_operand (XEXP (XEXP (value, 0), 1), 0),
5510 target, 0, OPTAB_LIB_WIDEN);
5513 tmp = force_operand (XEXP (value, 0), subtarget);
5514 return expand_binop (GET_MODE (value), binoptab, tmp,
5515 force_operand (op2, NULL_RTX),
5516 target, 0, OPTAB_LIB_WIDEN);
5517 /* We give UNSIGNEDP = 0 to expand_binop
5518 because the only operations we are expanding here are signed ones. */
5520 return value;
5523 /* Subroutine of expand_expr:
5524 save the non-copied parts (LIST) of an expr (LHS), and return a list
5525 which can restore these values to their previous values,
5526 should something modify their storage. */
5528 static tree
5529 save_noncopied_parts (lhs, list)
5530 tree lhs;
5531 tree list;
5533 tree tail;
5534 tree parts = 0;
5536 for (tail = list; tail; tail = TREE_CHAIN (tail))
5537 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5538 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5539 else
5541 tree part = TREE_VALUE (tail);
5542 tree part_type = TREE_TYPE (part);
5543 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5544 rtx target
5545 = assign_temp (build_qualified_type (part_type,
5546 (TYPE_QUALS (part_type)
5547 | TYPE_QUAL_CONST)),
5548 0, 1, 1);
5550 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5551 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5552 parts = tree_cons (to_be_saved,
5553 build (RTL_EXPR, part_type, NULL_TREE,
5554 (tree) target),
5555 parts);
5556 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5558 return parts;
5561 /* Subroutine of expand_expr:
5562 record the non-copied parts (LIST) of an expr (LHS), and return a list
5563 which specifies the initial values of these parts. */
5565 static tree
5566 init_noncopied_parts (lhs, list)
5567 tree lhs;
5568 tree list;
5570 tree tail;
5571 tree parts = 0;
5573 for (tail = list; tail; tail = TREE_CHAIN (tail))
5574 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5575 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5576 else if (TREE_PURPOSE (tail))
5578 tree part = TREE_VALUE (tail);
5579 tree part_type = TREE_TYPE (part);
5580 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5581 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5583 return parts;
5586 /* Subroutine of expand_expr: return nonzero iff there is no way that
5587 EXP can reference X, which is being modified. TOP_P is nonzero if this
5588 call is going to be used to determine whether we need a temporary
5589 for EXP, as opposed to a recursive call to this function.
5591 It is always safe for this routine to return zero since it merely
5592 searches for optimization opportunities. */
5595 safe_from_p (x, exp, top_p)
5596 rtx x;
5597 tree exp;
5598 int top_p;
5600 rtx exp_rtl = 0;
5601 int i, nops;
5602 static tree save_expr_list;
5604 if (x == 0
5605 /* If EXP has varying size, we MUST use a target since we currently
5606 have no way of allocating temporaries of variable size
5607 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5608 So we assume here that something at a higher level has prevented a
5609 clash. This is somewhat bogus, but the best we can do. Only
5610 do this when X is BLKmode and when we are at the top level. */
5611 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5612 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5613 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5614 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5615 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5616 != INTEGER_CST)
5617 && GET_MODE (x) == BLKmode)
5618 /* If X is in the outgoing argument area, it is always safe. */
5619 || (GET_CODE (x) == MEM
5620 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5621 || (GET_CODE (XEXP (x, 0)) == PLUS
5622 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5623 return 1;
5625 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5626 find the underlying pseudo. */
5627 if (GET_CODE (x) == SUBREG)
5629 x = SUBREG_REG (x);
5630 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5631 return 0;
5634 /* A SAVE_EXPR might appear many times in the expression passed to the
5635 top-level safe_from_p call, and if it has a complex subexpression,
5636 examining it multiple times could result in a combinatorial explosion.
5637 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5638 with optimization took about 28 minutes to compile -- even though it was
5639 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5640 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5641 we have processed. Note that the only test of top_p was above. */
5643 if (top_p)
5645 int rtn;
5646 tree t;
5648 save_expr_list = 0;
5650 rtn = safe_from_p (x, exp, 0);
5652 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5653 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5655 return rtn;
5658 /* Now look at our tree code and possibly recurse. */
5659 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5661 case 'd':
5662 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5663 break;
5665 case 'c':
5666 return 1;
5668 case 'x':
5669 if (TREE_CODE (exp) == TREE_LIST)
5670 return ((TREE_VALUE (exp) == 0
5671 || safe_from_p (x, TREE_VALUE (exp), 0))
5672 && (TREE_CHAIN (exp) == 0
5673 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5674 else if (TREE_CODE (exp) == ERROR_MARK)
5675 return 1; /* An already-visited SAVE_EXPR? */
5676 else
5677 return 0;
5679 case '1':
5680 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5682 case '2':
5683 case '<':
5684 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5685 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5687 case 'e':
5688 case 'r':
5689 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5690 the expression. If it is set, we conflict iff we are that rtx or
5691 both are in memory. Otherwise, we check all operands of the
5692 expression recursively. */
5694 switch (TREE_CODE (exp))
5696 case ADDR_EXPR:
5697 return (staticp (TREE_OPERAND (exp, 0))
5698 || TREE_STATIC (exp)
5699 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5701 case INDIRECT_REF:
5702 if (GET_CODE (x) == MEM
5703 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5704 get_alias_set (exp)))
5705 return 0;
5706 break;
5708 case CALL_EXPR:
5709 /* Assume that the call will clobber all hard registers and
5710 all of memory. */
5711 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5712 || GET_CODE (x) == MEM)
5713 return 0;
5714 break;
5716 case RTL_EXPR:
5717 /* If a sequence exists, we would have to scan every instruction
5718 in the sequence to see if it was safe. This is probably not
5719 worthwhile. */
5720 if (RTL_EXPR_SEQUENCE (exp))
5721 return 0;
5723 exp_rtl = RTL_EXPR_RTL (exp);
5724 break;
5726 case WITH_CLEANUP_EXPR:
5727 exp_rtl = RTL_EXPR_RTL (exp);
5728 break;
5730 case CLEANUP_POINT_EXPR:
5731 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5733 case SAVE_EXPR:
5734 exp_rtl = SAVE_EXPR_RTL (exp);
5735 if (exp_rtl)
5736 break;
5738 /* If we've already scanned this, don't do it again. Otherwise,
5739 show we've scanned it and record for clearing the flag if we're
5740 going on. */
5741 if (TREE_PRIVATE (exp))
5742 return 1;
5744 TREE_PRIVATE (exp) = 1;
5745 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5747 TREE_PRIVATE (exp) = 0;
5748 return 0;
5751 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5752 return 1;
5754 case BIND_EXPR:
5755 /* The only operand we look at is operand 1. The rest aren't
5756 part of the expression. */
5757 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5759 case METHOD_CALL_EXPR:
5760 /* This takes a rtx argument, but shouldn't appear here. */
5761 abort ();
5763 default:
5764 break;
5767 /* If we have an rtx, we do not need to scan our operands. */
5768 if (exp_rtl)
5769 break;
5771 nops = first_rtl_op (TREE_CODE (exp));
5772 for (i = 0; i < nops; i++)
5773 if (TREE_OPERAND (exp, i) != 0
5774 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5775 return 0;
5777 /* If this is a language-specific tree code, it may require
5778 special handling. */
5779 if ((unsigned int) TREE_CODE (exp)
5780 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5781 && lang_safe_from_p
5782 && !(*lang_safe_from_p) (x, exp))
5783 return 0;
5786 /* If we have an rtl, find any enclosed object. Then see if we conflict
5787 with it. */
5788 if (exp_rtl)
5790 if (GET_CODE (exp_rtl) == SUBREG)
5792 exp_rtl = SUBREG_REG (exp_rtl);
5793 if (GET_CODE (exp_rtl) == REG
5794 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5795 return 0;
5798 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5799 are memory and they conflict. */
5800 return ! (rtx_equal_p (x, exp_rtl)
5801 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5802 && true_dependence (exp_rtl, GET_MODE (x), x,
5803 rtx_addr_varies_p)));
5806 /* If we reach here, it is safe. */
5807 return 1;
5810 /* Subroutine of expand_expr: return nonzero iff EXP is an
5811 expression whose type is statically determinable. */
5813 static int
5814 fixed_type_p (exp)
5815 tree exp;
5817 if (TREE_CODE (exp) == PARM_DECL
5818 || TREE_CODE (exp) == VAR_DECL
5819 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5820 || TREE_CODE (exp) == COMPONENT_REF
5821 || TREE_CODE (exp) == ARRAY_REF)
5822 return 1;
5823 return 0;
5826 /* Subroutine of expand_expr: return rtx if EXP is a
5827 variable or parameter; else return 0. */
5829 static rtx
5830 var_rtx (exp)
5831 tree exp;
5833 STRIP_NOPS (exp);
5834 switch (TREE_CODE (exp))
5836 case PARM_DECL:
5837 case VAR_DECL:
5838 return DECL_RTL (exp);
5839 default:
5840 return 0;
5844 #ifdef MAX_INTEGER_COMPUTATION_MODE
5846 void
5847 check_max_integer_computation_mode (exp)
5848 tree exp;
5850 enum tree_code code;
5851 enum machine_mode mode;
5853 /* Strip any NOPs that don't change the mode. */
5854 STRIP_NOPS (exp);
5855 code = TREE_CODE (exp);
5857 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5858 if (code == NOP_EXPR
5859 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5860 return;
5862 /* First check the type of the overall operation. We need only look at
5863 unary, binary and relational operations. */
5864 if (TREE_CODE_CLASS (code) == '1'
5865 || TREE_CODE_CLASS (code) == '2'
5866 || TREE_CODE_CLASS (code) == '<')
5868 mode = TYPE_MODE (TREE_TYPE (exp));
5869 if (GET_MODE_CLASS (mode) == MODE_INT
5870 && mode > MAX_INTEGER_COMPUTATION_MODE)
5871 internal_error ("unsupported wide integer operation");
5874 /* Check operand of a unary op. */
5875 if (TREE_CODE_CLASS (code) == '1')
5877 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5878 if (GET_MODE_CLASS (mode) == MODE_INT
5879 && mode > MAX_INTEGER_COMPUTATION_MODE)
5880 internal_error ("unsupported wide integer operation");
5883 /* Check operands of a binary/comparison op. */
5884 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5886 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5887 if (GET_MODE_CLASS (mode) == MODE_INT
5888 && mode > MAX_INTEGER_COMPUTATION_MODE)
5889 internal_error ("unsupported wide integer operation");
5891 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5892 if (GET_MODE_CLASS (mode) == MODE_INT
5893 && mode > MAX_INTEGER_COMPUTATION_MODE)
5894 internal_error ("unsupported wide integer operation");
5897 #endif
5899 /* expand_expr: generate code for computing expression EXP.
5900 An rtx for the computed value is returned. The value is never null.
5901 In the case of a void EXP, const0_rtx is returned.
5903 The value may be stored in TARGET if TARGET is nonzero.
5904 TARGET is just a suggestion; callers must assume that
5905 the rtx returned may not be the same as TARGET.
5907 If TARGET is CONST0_RTX, it means that the value will be ignored.
5909 If TMODE is not VOIDmode, it suggests generating the
5910 result in mode TMODE. But this is done only when convenient.
5911 Otherwise, TMODE is ignored and the value generated in its natural mode.
5912 TMODE is just a suggestion; callers must assume that
5913 the rtx returned may not have mode TMODE.
5915 Note that TARGET may have neither TMODE nor MODE. In that case, it
5916 probably will not be used.
5918 If MODIFIER is EXPAND_SUM then when EXP is an addition
5919 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5920 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5921 products as above, or REG or MEM, or constant.
5922 Ordinarily in such cases we would output mul or add instructions
5923 and then return a pseudo reg containing the sum.
5925 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5926 it also marks a label as absolutely required (it can't be dead).
5927 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5928 This is used for outputting expressions used in initializers.
5930 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5931 with a constant address even if that address is not normally legitimate.
5932 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5935 expand_expr (exp, target, tmode, modifier)
5936 register tree exp;
5937 rtx target;
5938 enum machine_mode tmode;
5939 enum expand_modifier modifier;
5941 register rtx op0, op1, temp;
5942 tree type = TREE_TYPE (exp);
5943 int unsignedp = TREE_UNSIGNED (type);
5944 register enum machine_mode mode;
5945 register enum tree_code code = TREE_CODE (exp);
5946 optab this_optab;
5947 rtx subtarget, original_target;
5948 int ignore;
5949 tree context;
5950 /* Used by check-memory-usage to make modifier read only. */
5951 enum expand_modifier ro_modifier;
5953 /* Handle ERROR_MARK before anybody tries to access its type. */
5954 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5956 op0 = CONST0_RTX (tmode);
5957 if (op0 != 0)
5958 return op0;
5959 return const0_rtx;
5962 mode = TYPE_MODE (type);
5963 /* Use subtarget as the target for operand 0 of a binary operation. */
5964 subtarget = get_subtarget (target);
5965 original_target = target;
5966 ignore = (target == const0_rtx
5967 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5968 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5969 || code == COND_EXPR)
5970 && TREE_CODE (type) == VOID_TYPE));
5972 /* Make a read-only version of the modifier. */
5973 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5974 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5975 ro_modifier = modifier;
5976 else
5977 ro_modifier = EXPAND_NORMAL;
5979 /* If we are going to ignore this result, we need only do something
5980 if there is a side-effect somewhere in the expression. If there
5981 is, short-circuit the most common cases here. Note that we must
5982 not call expand_expr with anything but const0_rtx in case this
5983 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5985 if (ignore)
5987 if (! TREE_SIDE_EFFECTS (exp))
5988 return const0_rtx;
5990 /* Ensure we reference a volatile object even if value is ignored, but
5991 don't do this if all we are doing is taking its address. */
5992 if (TREE_THIS_VOLATILE (exp)
5993 && TREE_CODE (exp) != FUNCTION_DECL
5994 && mode != VOIDmode && mode != BLKmode
5995 && modifier != EXPAND_CONST_ADDRESS)
5997 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5998 if (GET_CODE (temp) == MEM)
5999 temp = copy_to_reg (temp);
6000 return const0_rtx;
6003 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6004 || code == INDIRECT_REF || code == BUFFER_REF)
6005 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6006 VOIDmode, ro_modifier);
6007 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6008 || code == ARRAY_REF)
6010 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6011 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6012 return const0_rtx;
6014 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6015 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6016 /* If the second operand has no side effects, just evaluate
6017 the first. */
6018 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6019 VOIDmode, ro_modifier);
6020 else if (code == BIT_FIELD_REF)
6022 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6023 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6024 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6025 return const0_rtx;
6028 target = 0;
6031 #ifdef MAX_INTEGER_COMPUTATION_MODE
6032 /* Only check stuff here if the mode we want is different from the mode
6033 of the expression; if it's the same, check_max_integer_computiation_mode
6034 will handle it. Do we really need to check this stuff at all? */
6036 if (target
6037 && GET_MODE (target) != mode
6038 && TREE_CODE (exp) != INTEGER_CST
6039 && TREE_CODE (exp) != PARM_DECL
6040 && TREE_CODE (exp) != ARRAY_REF
6041 && TREE_CODE (exp) != COMPONENT_REF
6042 && TREE_CODE (exp) != BIT_FIELD_REF
6043 && TREE_CODE (exp) != INDIRECT_REF
6044 && TREE_CODE (exp) != CALL_EXPR
6045 && TREE_CODE (exp) != VAR_DECL
6046 && TREE_CODE (exp) != RTL_EXPR)
6048 enum machine_mode mode = GET_MODE (target);
6050 if (GET_MODE_CLASS (mode) == MODE_INT
6051 && mode > MAX_INTEGER_COMPUTATION_MODE)
6052 internal_error ("unsupported wide integer operation");
6055 if (tmode != mode
6056 && TREE_CODE (exp) != INTEGER_CST
6057 && TREE_CODE (exp) != PARM_DECL
6058 && TREE_CODE (exp) != ARRAY_REF
6059 && TREE_CODE (exp) != COMPONENT_REF
6060 && TREE_CODE (exp) != BIT_FIELD_REF
6061 && TREE_CODE (exp) != INDIRECT_REF
6062 && TREE_CODE (exp) != VAR_DECL
6063 && TREE_CODE (exp) != CALL_EXPR
6064 && TREE_CODE (exp) != RTL_EXPR
6065 && GET_MODE_CLASS (tmode) == MODE_INT
6066 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6067 internal_error ("unsupported wide integer operation");
6069 check_max_integer_computation_mode (exp);
6070 #endif
6072 /* If will do cse, generate all results into pseudo registers
6073 since 1) that allows cse to find more things
6074 and 2) otherwise cse could produce an insn the machine
6075 cannot support. */
6077 if (! cse_not_expected && mode != BLKmode && target
6078 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6079 target = subtarget;
6081 switch (code)
6083 case LABEL_DECL:
6085 tree function = decl_function_context (exp);
6086 /* Handle using a label in a containing function. */
6087 if (function != current_function_decl
6088 && function != inline_function_decl && function != 0)
6090 struct function *p = find_function_data (function);
6091 p->expr->x_forced_labels
6092 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6093 p->expr->x_forced_labels);
6095 else
6097 if (modifier == EXPAND_INITIALIZER)
6098 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6099 label_rtx (exp),
6100 forced_labels);
6103 temp = gen_rtx_MEM (FUNCTION_MODE,
6104 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6105 if (function != current_function_decl
6106 && function != inline_function_decl && function != 0)
6107 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6108 return temp;
6111 case PARM_DECL:
6112 if (DECL_RTL (exp) == 0)
6114 error_with_decl (exp, "prior parameter's size depends on `%s'");
6115 return CONST0_RTX (mode);
6118 /* ... fall through ... */
6120 case VAR_DECL:
6121 /* If a static var's type was incomplete when the decl was written,
6122 but the type is complete now, lay out the decl now. */
6123 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6124 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6126 layout_decl (exp, 0);
6127 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6130 /* Although static-storage variables start off initialized, according to
6131 ANSI C, a memcpy could overwrite them with uninitialized values. So
6132 we check them too. This also lets us check for read-only variables
6133 accessed via a non-const declaration, in case it won't be detected
6134 any other way (e.g., in an embedded system or OS kernel without
6135 memory protection).
6137 Aggregates are not checked here; they're handled elsewhere. */
6138 if (cfun && current_function_check_memory_usage
6139 && code == VAR_DECL
6140 && GET_CODE (DECL_RTL (exp)) == MEM
6141 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6143 enum memory_use_mode memory_usage;
6144 memory_usage = get_memory_usage_from_modifier (modifier);
6146 in_check_memory_usage = 1;
6147 if (memory_usage != MEMORY_USE_DONT)
6148 emit_library_call (chkr_check_addr_libfunc,
6149 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6150 XEXP (DECL_RTL (exp), 0), Pmode,
6151 GEN_INT (int_size_in_bytes (type)),
6152 TYPE_MODE (sizetype),
6153 GEN_INT (memory_usage),
6154 TYPE_MODE (integer_type_node));
6155 in_check_memory_usage = 0;
6158 /* ... fall through ... */
6160 case FUNCTION_DECL:
6161 case RESULT_DECL:
6162 if (DECL_RTL (exp) == 0)
6163 abort ();
6165 /* Ensure variable marked as used even if it doesn't go through
6166 a parser. If it hasn't be used yet, write out an external
6167 definition. */
6168 if (! TREE_USED (exp))
6170 assemble_external (exp);
6171 TREE_USED (exp) = 1;
6174 /* Show we haven't gotten RTL for this yet. */
6175 temp = 0;
6177 /* Handle variables inherited from containing functions. */
6178 context = decl_function_context (exp);
6180 /* We treat inline_function_decl as an alias for the current function
6181 because that is the inline function whose vars, types, etc.
6182 are being merged into the current function.
6183 See expand_inline_function. */
6185 if (context != 0 && context != current_function_decl
6186 && context != inline_function_decl
6187 /* If var is static, we don't need a static chain to access it. */
6188 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6189 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6191 rtx addr;
6193 /* Mark as non-local and addressable. */
6194 DECL_NONLOCAL (exp) = 1;
6195 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6196 abort ();
6197 mark_addressable (exp);
6198 if (GET_CODE (DECL_RTL (exp)) != MEM)
6199 abort ();
6200 addr = XEXP (DECL_RTL (exp), 0);
6201 if (GET_CODE (addr) == MEM)
6202 addr = change_address (addr, Pmode,
6203 fix_lexical_addr (XEXP (addr, 0), exp));
6204 else
6205 addr = fix_lexical_addr (addr, exp);
6207 temp = change_address (DECL_RTL (exp), mode, addr);
6210 /* This is the case of an array whose size is to be determined
6211 from its initializer, while the initializer is still being parsed.
6212 See expand_decl. */
6214 else if (GET_CODE (DECL_RTL (exp)) == MEM
6215 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6216 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6217 XEXP (DECL_RTL (exp), 0));
6219 /* If DECL_RTL is memory, we are in the normal case and either
6220 the address is not valid or it is not a register and -fforce-addr
6221 is specified, get the address into a register. */
6223 else if (GET_CODE (DECL_RTL (exp)) == MEM
6224 && modifier != EXPAND_CONST_ADDRESS
6225 && modifier != EXPAND_SUM
6226 && modifier != EXPAND_INITIALIZER
6227 && (! memory_address_p (DECL_MODE (exp),
6228 XEXP (DECL_RTL (exp), 0))
6229 || (flag_force_addr
6230 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6231 temp = change_address (DECL_RTL (exp), VOIDmode,
6232 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6234 /* If we got something, return it. But first, set the alignment
6235 if the address is a register. */
6236 if (temp != 0)
6238 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6239 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6241 return temp;
6244 /* If the mode of DECL_RTL does not match that of the decl, it
6245 must be a promoted value. We return a SUBREG of the wanted mode,
6246 but mark it so that we know that it was already extended. */
6248 if (GET_CODE (DECL_RTL (exp)) == REG
6249 && GET_MODE (DECL_RTL (exp)) != mode)
6251 /* Get the signedness used for this variable. Ensure we get the
6252 same mode we got when the variable was declared. */
6253 if (GET_MODE (DECL_RTL (exp))
6254 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6255 abort ();
6257 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6258 SUBREG_PROMOTED_VAR_P (temp) = 1;
6259 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6260 return temp;
6263 return DECL_RTL (exp);
6265 case INTEGER_CST:
6266 return immed_double_const (TREE_INT_CST_LOW (exp),
6267 TREE_INT_CST_HIGH (exp), mode);
6269 case CONST_DECL:
6270 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6271 EXPAND_MEMORY_USE_BAD);
6273 case REAL_CST:
6274 /* If optimized, generate immediate CONST_DOUBLE
6275 which will be turned into memory by reload if necessary.
6277 We used to force a register so that loop.c could see it. But
6278 this does not allow gen_* patterns to perform optimizations with
6279 the constants. It also produces two insns in cases like "x = 1.0;".
6280 On most machines, floating-point constants are not permitted in
6281 many insns, so we'd end up copying it to a register in any case.
6283 Now, we do the copying in expand_binop, if appropriate. */
6284 return immed_real_const (exp);
6286 case COMPLEX_CST:
6287 case STRING_CST:
6288 if (! TREE_CST_RTL (exp))
6289 output_constant_def (exp, 1);
6291 /* TREE_CST_RTL probably contains a constant address.
6292 On RISC machines where a constant address isn't valid,
6293 make some insns to get that address into a register. */
6294 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6295 && modifier != EXPAND_CONST_ADDRESS
6296 && modifier != EXPAND_INITIALIZER
6297 && modifier != EXPAND_SUM
6298 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6299 || (flag_force_addr
6300 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6301 return change_address (TREE_CST_RTL (exp), VOIDmode,
6302 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6303 return TREE_CST_RTL (exp);
6305 case EXPR_WITH_FILE_LOCATION:
6307 rtx to_return;
6308 const char *saved_input_filename = input_filename;
6309 int saved_lineno = lineno;
6310 input_filename = EXPR_WFL_FILENAME (exp);
6311 lineno = EXPR_WFL_LINENO (exp);
6312 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6313 emit_line_note (input_filename, lineno);
6314 /* Possibly avoid switching back and force here. */
6315 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6316 input_filename = saved_input_filename;
6317 lineno = saved_lineno;
6318 return to_return;
6321 case SAVE_EXPR:
6322 context = decl_function_context (exp);
6324 /* If this SAVE_EXPR was at global context, assume we are an
6325 initialization function and move it into our context. */
6326 if (context == 0)
6327 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6329 /* We treat inline_function_decl as an alias for the current function
6330 because that is the inline function whose vars, types, etc.
6331 are being merged into the current function.
6332 See expand_inline_function. */
6333 if (context == current_function_decl || context == inline_function_decl)
6334 context = 0;
6336 /* If this is non-local, handle it. */
6337 if (context)
6339 /* The following call just exists to abort if the context is
6340 not of a containing function. */
6341 find_function_data (context);
6343 temp = SAVE_EXPR_RTL (exp);
6344 if (temp && GET_CODE (temp) == REG)
6346 put_var_into_stack (exp);
6347 temp = SAVE_EXPR_RTL (exp);
6349 if (temp == 0 || GET_CODE (temp) != MEM)
6350 abort ();
6351 return change_address (temp, mode,
6352 fix_lexical_addr (XEXP (temp, 0), exp));
6354 if (SAVE_EXPR_RTL (exp) == 0)
6356 if (mode == VOIDmode)
6357 temp = const0_rtx;
6358 else
6359 temp = assign_temp (build_qualified_type (type,
6360 (TYPE_QUALS (type)
6361 | TYPE_QUAL_CONST)),
6362 3, 0, 0);
6364 SAVE_EXPR_RTL (exp) = temp;
6365 if (!optimize && GET_CODE (temp) == REG)
6366 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6367 save_expr_regs);
6369 /* If the mode of TEMP does not match that of the expression, it
6370 must be a promoted value. We pass store_expr a SUBREG of the
6371 wanted mode but mark it so that we know that it was already
6372 extended. Note that `unsignedp' was modified above in
6373 this case. */
6375 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6377 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6378 SUBREG_PROMOTED_VAR_P (temp) = 1;
6379 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6382 if (temp == const0_rtx)
6383 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6384 EXPAND_MEMORY_USE_BAD);
6385 else
6386 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6388 TREE_USED (exp) = 1;
6391 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6392 must be a promoted value. We return a SUBREG of the wanted mode,
6393 but mark it so that we know that it was already extended. */
6395 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6396 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6398 /* Compute the signedness and make the proper SUBREG. */
6399 promote_mode (type, mode, &unsignedp, 0);
6400 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6401 SUBREG_PROMOTED_VAR_P (temp) = 1;
6402 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6403 return temp;
6406 return SAVE_EXPR_RTL (exp);
6408 case UNSAVE_EXPR:
6410 rtx temp;
6411 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6412 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6413 return temp;
6416 case PLACEHOLDER_EXPR:
6418 tree placeholder_expr;
6420 /* If there is an object on the head of the placeholder list,
6421 see if some object in it of type TYPE or a pointer to it. For
6422 further information, see tree.def. */
6423 for (placeholder_expr = placeholder_list;
6424 placeholder_expr != 0;
6425 placeholder_expr = TREE_CHAIN (placeholder_expr))
6427 tree need_type = TYPE_MAIN_VARIANT (type);
6428 tree object = 0;
6429 tree old_list = placeholder_list;
6430 tree elt;
6432 /* Find the outermost reference that is of the type we want.
6433 If none, see if any object has a type that is a pointer to
6434 the type we want. */
6435 for (elt = TREE_PURPOSE (placeholder_expr);
6436 elt != 0 && object == 0;
6438 = ((TREE_CODE (elt) == COMPOUND_EXPR
6439 || TREE_CODE (elt) == COND_EXPR)
6440 ? TREE_OPERAND (elt, 1)
6441 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6442 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6443 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6444 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6445 ? TREE_OPERAND (elt, 0) : 0))
6446 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6447 object = elt;
6449 for (elt = TREE_PURPOSE (placeholder_expr);
6450 elt != 0 && object == 0;
6452 = ((TREE_CODE (elt) == COMPOUND_EXPR
6453 || TREE_CODE (elt) == COND_EXPR)
6454 ? TREE_OPERAND (elt, 1)
6455 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6456 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6457 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6458 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6459 ? TREE_OPERAND (elt, 0) : 0))
6460 if (POINTER_TYPE_P (TREE_TYPE (elt))
6461 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6462 == need_type))
6463 object = build1 (INDIRECT_REF, need_type, elt);
6465 if (object != 0)
6467 /* Expand this object skipping the list entries before
6468 it was found in case it is also a PLACEHOLDER_EXPR.
6469 In that case, we want to translate it using subsequent
6470 entries. */
6471 placeholder_list = TREE_CHAIN (placeholder_expr);
6472 temp = expand_expr (object, original_target, tmode,
6473 ro_modifier);
6474 placeholder_list = old_list;
6475 return temp;
6480 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6481 abort ();
6483 case WITH_RECORD_EXPR:
6484 /* Put the object on the placeholder list, expand our first operand,
6485 and pop the list. */
6486 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6487 placeholder_list);
6488 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6489 tmode, ro_modifier);
6490 placeholder_list = TREE_CHAIN (placeholder_list);
6491 return target;
6493 case GOTO_EXPR:
6494 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6495 expand_goto (TREE_OPERAND (exp, 0));
6496 else
6497 expand_computed_goto (TREE_OPERAND (exp, 0));
6498 return const0_rtx;
6500 case EXIT_EXPR:
6501 expand_exit_loop_if_false (NULL_PTR,
6502 invert_truthvalue (TREE_OPERAND (exp, 0)));
6503 return const0_rtx;
6505 case LABELED_BLOCK_EXPR:
6506 if (LABELED_BLOCK_BODY (exp))
6507 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6508 /* Should perhaps use expand_label, but this is simpler and safer. */
6509 do_pending_stack_adjust ();
6510 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6511 return const0_rtx;
6513 case EXIT_BLOCK_EXPR:
6514 if (EXIT_BLOCK_RETURN (exp))
6515 sorry ("returned value in block_exit_expr");
6516 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6517 return const0_rtx;
6519 case LOOP_EXPR:
6520 push_temp_slots ();
6521 expand_start_loop (1);
6522 expand_expr_stmt (TREE_OPERAND (exp, 0));
6523 expand_end_loop ();
6524 pop_temp_slots ();
6526 return const0_rtx;
6528 case BIND_EXPR:
6530 tree vars = TREE_OPERAND (exp, 0);
6531 int vars_need_expansion = 0;
6533 /* Need to open a binding contour here because
6534 if there are any cleanups they must be contained here. */
6535 expand_start_bindings (2);
6537 /* Mark the corresponding BLOCK for output in its proper place. */
6538 if (TREE_OPERAND (exp, 2) != 0
6539 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6540 insert_block (TREE_OPERAND (exp, 2));
6542 /* If VARS have not yet been expanded, expand them now. */
6543 while (vars)
6545 if (!DECL_RTL_SET_P (vars))
6547 vars_need_expansion = 1;
6548 expand_decl (vars);
6550 expand_decl_init (vars);
6551 vars = TREE_CHAIN (vars);
6554 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6556 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6558 return temp;
6561 case RTL_EXPR:
6562 if (RTL_EXPR_SEQUENCE (exp))
6564 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6565 abort ();
6566 emit_insns (RTL_EXPR_SEQUENCE (exp));
6567 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6569 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6570 free_temps_for_rtl_expr (exp);
6571 return RTL_EXPR_RTL (exp);
6573 case CONSTRUCTOR:
6574 /* If we don't need the result, just ensure we evaluate any
6575 subexpressions. */
6576 if (ignore)
6578 tree elt;
6579 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6580 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6581 EXPAND_MEMORY_USE_BAD);
6582 return const0_rtx;
6585 /* All elts simple constants => refer to a constant in memory. But
6586 if this is a non-BLKmode mode, let it store a field at a time
6587 since that should make a CONST_INT or CONST_DOUBLE when we
6588 fold. Likewise, if we have a target we can use, it is best to
6589 store directly into the target unless the type is large enough
6590 that memcpy will be used. If we are making an initializer and
6591 all operands are constant, put it in memory as well. */
6592 else if ((TREE_STATIC (exp)
6593 && ((mode == BLKmode
6594 && ! (target != 0 && safe_from_p (target, exp, 1)))
6595 || TREE_ADDRESSABLE (exp)
6596 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6597 && (! MOVE_BY_PIECES_P
6598 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6599 TYPE_ALIGN (type)))
6600 && ! mostly_zeros_p (exp))))
6601 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6603 rtx constructor = output_constant_def (exp, 1);
6605 if (modifier != EXPAND_CONST_ADDRESS
6606 && modifier != EXPAND_INITIALIZER
6607 && modifier != EXPAND_SUM
6608 && (! memory_address_p (GET_MODE (constructor),
6609 XEXP (constructor, 0))
6610 || (flag_force_addr
6611 && GET_CODE (XEXP (constructor, 0)) != REG)))
6612 constructor = change_address (constructor, VOIDmode,
6613 XEXP (constructor, 0));
6614 return constructor;
6616 else
6618 /* Handle calls that pass values in multiple non-contiguous
6619 locations. The Irix 6 ABI has examples of this. */
6620 if (target == 0 || ! safe_from_p (target, exp, 1)
6621 || GET_CODE (target) == PARALLEL)
6622 target
6623 = assign_temp (build_qualified_type (type,
6624 (TYPE_QUALS (type)
6625 | (TREE_READONLY (exp)
6626 * TYPE_QUAL_CONST))),
6627 TREE_ADDRESSABLE (exp), 1, 1);
6629 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6630 int_size_in_bytes (TREE_TYPE (exp)));
6631 return target;
6634 case INDIRECT_REF:
6636 tree exp1 = TREE_OPERAND (exp, 0);
6637 tree index;
6638 tree string = string_constant (exp1, &index);
6640 /* Try to optimize reads from const strings. */
6641 if (string
6642 && TREE_CODE (string) == STRING_CST
6643 && TREE_CODE (index) == INTEGER_CST
6644 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6645 && GET_MODE_CLASS (mode) == MODE_INT
6646 && GET_MODE_SIZE (mode) == 1
6647 && modifier != EXPAND_MEMORY_USE_WO)
6648 return
6649 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6651 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6652 op0 = memory_address (mode, op0);
6654 if (cfun && current_function_check_memory_usage
6655 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6657 enum memory_use_mode memory_usage;
6658 memory_usage = get_memory_usage_from_modifier (modifier);
6660 if (memory_usage != MEMORY_USE_DONT)
6662 in_check_memory_usage = 1;
6663 emit_library_call (chkr_check_addr_libfunc,
6664 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6665 Pmode, GEN_INT (int_size_in_bytes (type)),
6666 TYPE_MODE (sizetype),
6667 GEN_INT (memory_usage),
6668 TYPE_MODE (integer_type_node));
6669 in_check_memory_usage = 0;
6673 temp = gen_rtx_MEM (mode, op0);
6674 set_mem_attributes (temp, exp, 0);
6676 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6677 here, because, in C and C++, the fact that a location is accessed
6678 through a pointer to const does not mean that the value there can
6679 never change. Languages where it can never change should
6680 also set TREE_STATIC. */
6681 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6683 /* If we are writing to this object and its type is a record with
6684 readonly fields, we must mark it as readonly so it will
6685 conflict with readonly references to those fields. */
6686 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6687 RTX_UNCHANGING_P (temp) = 1;
6689 return temp;
6692 case ARRAY_REF:
6693 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6694 abort ();
6697 tree array = TREE_OPERAND (exp, 0);
6698 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6699 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6700 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6701 HOST_WIDE_INT i;
6703 /* Optimize the special-case of a zero lower bound.
6705 We convert the low_bound to sizetype to avoid some problems
6706 with constant folding. (E.g. suppose the lower bound is 1,
6707 and its mode is QI. Without the conversion, (ARRAY
6708 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6709 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6711 if (! integer_zerop (low_bound))
6712 index = size_diffop (index, convert (sizetype, low_bound));
6714 /* Fold an expression like: "foo"[2].
6715 This is not done in fold so it won't happen inside &.
6716 Don't fold if this is for wide characters since it's too
6717 difficult to do correctly and this is a very rare case. */
6719 if (TREE_CODE (array) == STRING_CST
6720 && TREE_CODE (index) == INTEGER_CST
6721 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6722 && GET_MODE_CLASS (mode) == MODE_INT
6723 && GET_MODE_SIZE (mode) == 1)
6724 return
6725 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6727 /* If this is a constant index into a constant array,
6728 just get the value from the array. Handle both the cases when
6729 we have an explicit constructor and when our operand is a variable
6730 that was declared const. */
6732 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6733 && TREE_CODE (index) == INTEGER_CST
6734 && 0 > compare_tree_int (index,
6735 list_length (CONSTRUCTOR_ELTS
6736 (TREE_OPERAND (exp, 0)))))
6738 tree elem;
6740 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6741 i = TREE_INT_CST_LOW (index);
6742 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6745 if (elem)
6746 return expand_expr (fold (TREE_VALUE (elem)), target,
6747 tmode, ro_modifier);
6750 else if (optimize >= 1
6751 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6752 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6753 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6755 if (TREE_CODE (index) == INTEGER_CST)
6757 tree init = DECL_INITIAL (array);
6759 if (TREE_CODE (init) == CONSTRUCTOR)
6761 tree elem;
6763 for (elem = CONSTRUCTOR_ELTS (init);
6764 (elem
6765 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6766 elem = TREE_CHAIN (elem))
6769 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6770 return expand_expr (fold (TREE_VALUE (elem)), target,
6771 tmode, ro_modifier);
6773 else if (TREE_CODE (init) == STRING_CST
6774 && 0 > compare_tree_int (index,
6775 TREE_STRING_LENGTH (init)))
6777 tree type = TREE_TYPE (TREE_TYPE (init));
6778 enum machine_mode mode = TYPE_MODE (type);
6780 if (GET_MODE_CLASS (mode) == MODE_INT
6781 && GET_MODE_SIZE (mode) == 1)
6782 return (GEN_INT
6783 (TREE_STRING_POINTER
6784 (init)[TREE_INT_CST_LOW (index)]));
6789 /* Fall through. */
6791 case COMPONENT_REF:
6792 case BIT_FIELD_REF:
6793 /* If the operand is a CONSTRUCTOR, we can just extract the
6794 appropriate field if it is present. Don't do this if we have
6795 already written the data since we want to refer to that copy
6796 and varasm.c assumes that's what we'll do. */
6797 if (code != ARRAY_REF
6798 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6799 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6801 tree elt;
6803 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6804 elt = TREE_CHAIN (elt))
6805 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6806 /* We can normally use the value of the field in the
6807 CONSTRUCTOR. However, if this is a bitfield in
6808 an integral mode that we can fit in a HOST_WIDE_INT,
6809 we must mask only the number of bits in the bitfield,
6810 since this is done implicitly by the constructor. If
6811 the bitfield does not meet either of those conditions,
6812 we can't do this optimization. */
6813 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6814 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6815 == MODE_INT)
6816 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6817 <= HOST_BITS_PER_WIDE_INT))))
6819 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6820 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6822 HOST_WIDE_INT bitsize
6823 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6825 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6827 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6828 op0 = expand_and (op0, op1, target);
6830 else
6832 enum machine_mode imode
6833 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6834 tree count
6835 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6838 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6839 target, 0);
6840 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6841 target, 0);
6845 return op0;
6850 enum machine_mode mode1;
6851 HOST_WIDE_INT bitsize, bitpos;
6852 tree offset;
6853 int volatilep = 0;
6854 unsigned int alignment;
6855 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6856 &mode1, &unsignedp, &volatilep,
6857 &alignment);
6859 /* If we got back the original object, something is wrong. Perhaps
6860 we are evaluating an expression too early. In any event, don't
6861 infinitely recurse. */
6862 if (tem == exp)
6863 abort ();
6865 /* If TEM's type is a union of variable size, pass TARGET to the inner
6866 computation, since it will need a temporary and TARGET is known
6867 to have to do. This occurs in unchecked conversion in Ada. */
6869 op0 = expand_expr (tem,
6870 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6871 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6872 != INTEGER_CST)
6873 ? target : NULL_RTX),
6874 VOIDmode,
6875 (modifier == EXPAND_INITIALIZER
6876 || modifier == EXPAND_CONST_ADDRESS)
6877 ? modifier : EXPAND_NORMAL);
6879 /* If this is a constant, put it into a register if it is a
6880 legitimate constant and OFFSET is 0 and memory if it isn't. */
6881 if (CONSTANT_P (op0))
6883 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6884 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6885 && offset == 0)
6886 op0 = force_reg (mode, op0);
6887 else
6888 op0 = validize_mem (force_const_mem (mode, op0));
6891 if (offset != 0)
6893 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6895 /* If this object is in memory, put it into a register.
6896 This case can't occur in C, but can in Ada if we have
6897 unchecked conversion of an expression from a scalar type to
6898 an array or record type. */
6899 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6900 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6902 tree nt = build_qualified_type (TREE_TYPE (tem),
6903 (TYPE_QUALS (TREE_TYPE (tem))
6904 | TYPE_QUAL_CONST));
6905 rtx memloc = assign_temp (nt, 1, 1, 1);
6907 mark_temp_addr_taken (memloc);
6908 emit_move_insn (memloc, op0);
6909 op0 = memloc;
6912 if (GET_CODE (op0) != MEM)
6913 abort ();
6915 if (GET_MODE (offset_rtx) != ptr_mode)
6917 #ifdef POINTERS_EXTEND_UNSIGNED
6918 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6919 #else
6920 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6921 #endif
6924 /* A constant address in OP0 can have VOIDmode, we must not try
6925 to call force_reg for that case. Avoid that case. */
6926 if (GET_CODE (op0) == MEM
6927 && GET_MODE (op0) == BLKmode
6928 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6929 && bitsize != 0
6930 && (bitpos % bitsize) == 0
6931 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6932 && alignment == GET_MODE_ALIGNMENT (mode1))
6934 rtx temp = change_address (op0, mode1,
6935 plus_constant (XEXP (op0, 0),
6936 (bitpos /
6937 BITS_PER_UNIT)));
6938 if (GET_CODE (XEXP (temp, 0)) == REG)
6939 op0 = temp;
6940 else
6941 op0 = change_address (op0, mode1,
6942 force_reg (GET_MODE (XEXP (temp, 0)),
6943 XEXP (temp, 0)));
6944 bitpos = 0;
6947 op0 = change_address (op0, VOIDmode,
6948 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6949 force_reg (ptr_mode,
6950 offset_rtx)));
6953 /* Don't forget about volatility even if this is a bitfield. */
6954 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6956 op0 = copy_rtx (op0);
6957 MEM_VOLATILE_P (op0) = 1;
6960 /* Check the access. */
6961 if (cfun != 0 && current_function_check_memory_usage
6962 && GET_CODE (op0) == MEM)
6964 enum memory_use_mode memory_usage;
6965 memory_usage = get_memory_usage_from_modifier (modifier);
6967 if (memory_usage != MEMORY_USE_DONT)
6969 rtx to;
6970 int size;
6972 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6973 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6975 /* Check the access right of the pointer. */
6976 in_check_memory_usage = 1;
6977 if (size > BITS_PER_UNIT)
6978 emit_library_call (chkr_check_addr_libfunc,
6979 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6980 Pmode, GEN_INT (size / BITS_PER_UNIT),
6981 TYPE_MODE (sizetype),
6982 GEN_INT (memory_usage),
6983 TYPE_MODE (integer_type_node));
6984 in_check_memory_usage = 0;
6988 /* In cases where an aligned union has an unaligned object
6989 as a field, we might be extracting a BLKmode value from
6990 an integer-mode (e.g., SImode) object. Handle this case
6991 by doing the extract into an object as wide as the field
6992 (which we know to be the width of a basic mode), then
6993 storing into memory, and changing the mode to BLKmode.
6994 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6995 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6996 if (mode1 == VOIDmode
6997 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6998 || (modifier != EXPAND_CONST_ADDRESS
6999 && modifier != EXPAND_INITIALIZER
7000 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
7001 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7002 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7003 /* If the field isn't aligned enough to fetch as a memref,
7004 fetch it as a bit field. */
7005 || (mode1 != BLKmode
7006 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7007 && ((TYPE_ALIGN (TREE_TYPE (tem))
7008 < GET_MODE_ALIGNMENT (mode))
7009 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7010 /* If the type and the field are a constant size and the
7011 size of the type isn't the same size as the bitfield,
7012 we must use bitfield operations. */
7013 || ((bitsize >= 0
7014 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7015 == INTEGER_CST)
7016 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7017 bitsize)))))
7018 || (modifier != EXPAND_CONST_ADDRESS
7019 && modifier != EXPAND_INITIALIZER
7020 && mode == BLKmode
7021 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7022 && (TYPE_ALIGN (type) > alignment
7023 || bitpos % TYPE_ALIGN (type) != 0)))
7025 enum machine_mode ext_mode = mode;
7027 if (ext_mode == BLKmode
7028 && ! (target != 0 && GET_CODE (op0) == MEM
7029 && GET_CODE (target) == MEM
7030 && bitpos % BITS_PER_UNIT == 0))
7031 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7033 if (ext_mode == BLKmode)
7035 /* In this case, BITPOS must start at a byte boundary and
7036 TARGET, if specified, must be a MEM. */
7037 if (GET_CODE (op0) != MEM
7038 || (target != 0 && GET_CODE (target) != MEM)
7039 || bitpos % BITS_PER_UNIT != 0)
7040 abort ();
7042 op0 = change_address (op0, VOIDmode,
7043 plus_constant (XEXP (op0, 0),
7044 bitpos / BITS_PER_UNIT));
7045 if (target == 0)
7046 target = assign_temp (type, 0, 1, 1);
7048 emit_block_move (target, op0,
7049 bitsize == -1 ? expr_size (exp)
7050 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7051 / BITS_PER_UNIT),
7052 BITS_PER_UNIT);
7054 return target;
7057 op0 = validize_mem (op0);
7059 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7060 mark_reg_pointer (XEXP (op0, 0), alignment);
7062 op0 = extract_bit_field (op0, bitsize, bitpos,
7063 unsignedp, target, ext_mode, ext_mode,
7064 alignment,
7065 int_size_in_bytes (TREE_TYPE (tem)));
7067 /* If the result is a record type and BITSIZE is narrower than
7068 the mode of OP0, an integral mode, and this is a big endian
7069 machine, we must put the field into the high-order bits. */
7070 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7071 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7072 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7073 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7074 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7075 - bitsize),
7076 op0, 1);
7078 if (mode == BLKmode)
7080 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7081 TYPE_QUAL_CONST);
7082 rtx new = assign_temp (nt, 0, 1, 1);
7084 emit_move_insn (new, op0);
7085 op0 = copy_rtx (new);
7086 PUT_MODE (op0, BLKmode);
7089 return op0;
7092 /* If the result is BLKmode, use that to access the object
7093 now as well. */
7094 if (mode == BLKmode)
7095 mode1 = BLKmode;
7097 /* Get a reference to just this component. */
7098 if (modifier == EXPAND_CONST_ADDRESS
7099 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7101 rtx new = gen_rtx_MEM (mode1,
7102 plus_constant (XEXP (op0, 0),
7103 (bitpos / BITS_PER_UNIT)));
7105 MEM_COPY_ATTRIBUTES (new, op0);
7106 op0 = new;
7108 else
7109 op0 = change_address (op0, mode1,
7110 plus_constant (XEXP (op0, 0),
7111 (bitpos / BITS_PER_UNIT)));
7113 set_mem_attributes (op0, exp, 0);
7114 if (GET_CODE (XEXP (op0, 0)) == REG)
7115 mark_reg_pointer (XEXP (op0, 0), alignment);
7117 MEM_VOLATILE_P (op0) |= volatilep;
7118 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7119 || modifier == EXPAND_CONST_ADDRESS
7120 || modifier == EXPAND_INITIALIZER)
7121 return op0;
7122 else if (target == 0)
7123 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7125 convert_move (target, op0, unsignedp);
7126 return target;
7129 /* Intended for a reference to a buffer of a file-object in Pascal.
7130 But it's not certain that a special tree code will really be
7131 necessary for these. INDIRECT_REF might work for them. */
7132 case BUFFER_REF:
7133 abort ();
7135 case IN_EXPR:
7137 /* Pascal set IN expression.
7139 Algorithm:
7140 rlo = set_low - (set_low%bits_per_word);
7141 the_word = set [ (index - rlo)/bits_per_word ];
7142 bit_index = index % bits_per_word;
7143 bitmask = 1 << bit_index;
7144 return !!(the_word & bitmask); */
7146 tree set = TREE_OPERAND (exp, 0);
7147 tree index = TREE_OPERAND (exp, 1);
7148 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7149 tree set_type = TREE_TYPE (set);
7150 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7151 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7152 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7153 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7154 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7155 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7156 rtx setaddr = XEXP (setval, 0);
7157 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7158 rtx rlow;
7159 rtx diff, quo, rem, addr, bit, result;
7161 /* If domain is empty, answer is no. Likewise if index is constant
7162 and out of bounds. */
7163 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7164 && TREE_CODE (set_low_bound) == INTEGER_CST
7165 && tree_int_cst_lt (set_high_bound, set_low_bound))
7166 || (TREE_CODE (index) == INTEGER_CST
7167 && TREE_CODE (set_low_bound) == INTEGER_CST
7168 && tree_int_cst_lt (index, set_low_bound))
7169 || (TREE_CODE (set_high_bound) == INTEGER_CST
7170 && TREE_CODE (index) == INTEGER_CST
7171 && tree_int_cst_lt (set_high_bound, index))))
7172 return const0_rtx;
7174 if (target == 0)
7175 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7177 /* If we get here, we have to generate the code for both cases
7178 (in range and out of range). */
7180 op0 = gen_label_rtx ();
7181 op1 = gen_label_rtx ();
7183 if (! (GET_CODE (index_val) == CONST_INT
7184 && GET_CODE (lo_r) == CONST_INT))
7186 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7187 GET_MODE (index_val), iunsignedp, 0, op1);
7190 if (! (GET_CODE (index_val) == CONST_INT
7191 && GET_CODE (hi_r) == CONST_INT))
7193 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7194 GET_MODE (index_val), iunsignedp, 0, op1);
7197 /* Calculate the element number of bit zero in the first word
7198 of the set. */
7199 if (GET_CODE (lo_r) == CONST_INT)
7200 rlow = GEN_INT (INTVAL (lo_r)
7201 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7202 else
7203 rlow = expand_binop (index_mode, and_optab, lo_r,
7204 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7205 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7207 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7208 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7210 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7211 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7212 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7213 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7215 addr = memory_address (byte_mode,
7216 expand_binop (index_mode, add_optab, diff,
7217 setaddr, NULL_RTX, iunsignedp,
7218 OPTAB_LIB_WIDEN));
7220 /* Extract the bit we want to examine. */
7221 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7222 gen_rtx_MEM (byte_mode, addr),
7223 make_tree (TREE_TYPE (index), rem),
7224 NULL_RTX, 1);
7225 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7226 GET_MODE (target) == byte_mode ? target : 0,
7227 1, OPTAB_LIB_WIDEN);
7229 if (result != target)
7230 convert_move (target, result, 1);
7232 /* Output the code to handle the out-of-range case. */
7233 emit_jump (op0);
7234 emit_label (op1);
7235 emit_move_insn (target, const0_rtx);
7236 emit_label (op0);
7237 return target;
7240 case WITH_CLEANUP_EXPR:
7241 if (RTL_EXPR_RTL (exp) == 0)
7243 RTL_EXPR_RTL (exp)
7244 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7245 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7247 /* That's it for this cleanup. */
7248 TREE_OPERAND (exp, 2) = 0;
7250 return RTL_EXPR_RTL (exp);
7252 case CLEANUP_POINT_EXPR:
7254 /* Start a new binding layer that will keep track of all cleanup
7255 actions to be performed. */
7256 expand_start_bindings (2);
7258 target_temp_slot_level = temp_slot_level;
7260 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7261 /* If we're going to use this value, load it up now. */
7262 if (! ignore)
7263 op0 = force_not_mem (op0);
7264 preserve_temp_slots (op0);
7265 expand_end_bindings (NULL_TREE, 0, 0);
7267 return op0;
7269 case CALL_EXPR:
7270 /* Check for a built-in function. */
7271 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7272 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7273 == FUNCTION_DECL)
7274 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7276 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7277 == BUILT_IN_FRONTEND)
7278 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7279 else
7280 return expand_builtin (exp, target, subtarget, tmode, ignore);
7283 return expand_call (exp, target, ignore);
7285 case NON_LVALUE_EXPR:
7286 case NOP_EXPR:
7287 case CONVERT_EXPR:
7288 case REFERENCE_EXPR:
7289 if (TREE_OPERAND (exp, 0) == error_mark_node)
7290 return const0_rtx;
7292 if (TREE_CODE (type) == UNION_TYPE)
7294 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7296 /* If both input and output are BLKmode, this conversion
7297 isn't actually doing anything unless we need to make the
7298 alignment stricter. */
7299 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7300 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7301 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7302 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7303 modifier);
7305 if (target == 0)
7306 target = assign_temp (type, 0, 1, 1);
7308 if (GET_CODE (target) == MEM)
7309 /* Store data into beginning of memory target. */
7310 store_expr (TREE_OPERAND (exp, 0),
7311 change_address (target, TYPE_MODE (valtype), 0), 0);
7313 else if (GET_CODE (target) == REG)
7314 /* Store this field into a union of the proper type. */
7315 store_field (target,
7316 MIN ((int_size_in_bytes (TREE_TYPE
7317 (TREE_OPERAND (exp, 0)))
7318 * BITS_PER_UNIT),
7319 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7320 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7321 VOIDmode, 0, BITS_PER_UNIT,
7322 int_size_in_bytes (type), 0);
7323 else
7324 abort ();
7326 /* Return the entire union. */
7327 return target;
7330 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7332 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7333 ro_modifier);
7335 /* If the signedness of the conversion differs and OP0 is
7336 a promoted SUBREG, clear that indication since we now
7337 have to do the proper extension. */
7338 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7339 && GET_CODE (op0) == SUBREG)
7340 SUBREG_PROMOTED_VAR_P (op0) = 0;
7342 return op0;
7345 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7346 if (GET_MODE (op0) == mode)
7347 return op0;
7349 /* If OP0 is a constant, just convert it into the proper mode. */
7350 if (CONSTANT_P (op0))
7351 return
7352 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7353 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7355 if (modifier == EXPAND_INITIALIZER)
7356 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7358 if (target == 0)
7359 return
7360 convert_to_mode (mode, op0,
7361 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7362 else
7363 convert_move (target, op0,
7364 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7365 return target;
7367 case PLUS_EXPR:
7368 /* We come here from MINUS_EXPR when the second operand is a
7369 constant. */
7370 plus_expr:
7371 this_optab = ! unsignedp && flag_trapv
7372 && (GET_MODE_CLASS(mode) == MODE_INT)
7373 ? addv_optab : add_optab;
7375 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7376 something else, make sure we add the register to the constant and
7377 then to the other thing. This case can occur during strength
7378 reduction and doing it this way will produce better code if the
7379 frame pointer or argument pointer is eliminated.
7381 fold-const.c will ensure that the constant is always in the inner
7382 PLUS_EXPR, so the only case we need to do anything about is if
7383 sp, ap, or fp is our second argument, in which case we must swap
7384 the innermost first argument and our second argument. */
7386 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7387 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7388 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7389 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7390 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7391 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7393 tree t = TREE_OPERAND (exp, 1);
7395 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7396 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7399 /* If the result is to be ptr_mode and we are adding an integer to
7400 something, we might be forming a constant. So try to use
7401 plus_constant. If it produces a sum and we can't accept it,
7402 use force_operand. This allows P = &ARR[const] to generate
7403 efficient code on machines where a SYMBOL_REF is not a valid
7404 address.
7406 If this is an EXPAND_SUM call, always return the sum. */
7407 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7408 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7410 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7411 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7412 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7414 rtx constant_part;
7416 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7417 EXPAND_SUM);
7418 /* Use immed_double_const to ensure that the constant is
7419 truncated according to the mode of OP1, then sign extended
7420 to a HOST_WIDE_INT. Using the constant directly can result
7421 in non-canonical RTL in a 64x32 cross compile. */
7422 constant_part
7423 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7424 (HOST_WIDE_INT) 0,
7425 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7426 op1 = plus_constant (op1, INTVAL (constant_part));
7427 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7428 op1 = force_operand (op1, target);
7429 return op1;
7432 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7433 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7434 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7436 rtx constant_part;
7438 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7439 EXPAND_SUM);
7440 if (! CONSTANT_P (op0))
7442 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7443 VOIDmode, modifier);
7444 /* Don't go to both_summands if modifier
7445 says it's not right to return a PLUS. */
7446 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7447 goto binop2;
7448 goto both_summands;
7450 /* Use immed_double_const to ensure that the constant is
7451 truncated according to the mode of OP1, then sign extended
7452 to a HOST_WIDE_INT. Using the constant directly can result
7453 in non-canonical RTL in a 64x32 cross compile. */
7454 constant_part
7455 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7456 (HOST_WIDE_INT) 0,
7457 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7458 op0 = plus_constant (op0, INTVAL (constant_part));
7459 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7460 op0 = force_operand (op0, target);
7461 return op0;
7465 /* No sense saving up arithmetic to be done
7466 if it's all in the wrong mode to form part of an address.
7467 And force_operand won't know whether to sign-extend or
7468 zero-extend. */
7469 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7470 || mode != ptr_mode)
7471 goto binop;
7473 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7474 subtarget = 0;
7476 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7477 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7479 both_summands:
7480 /* Make sure any term that's a sum with a constant comes last. */
7481 if (GET_CODE (op0) == PLUS
7482 && CONSTANT_P (XEXP (op0, 1)))
7484 temp = op0;
7485 op0 = op1;
7486 op1 = temp;
7488 /* If adding to a sum including a constant,
7489 associate it to put the constant outside. */
7490 if (GET_CODE (op1) == PLUS
7491 && CONSTANT_P (XEXP (op1, 1)))
7493 rtx constant_term = const0_rtx;
7495 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7496 if (temp != 0)
7497 op0 = temp;
7498 /* Ensure that MULT comes first if there is one. */
7499 else if (GET_CODE (op0) == MULT)
7500 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7501 else
7502 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7504 /* Let's also eliminate constants from op0 if possible. */
7505 op0 = eliminate_constant_term (op0, &constant_term);
7507 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7508 their sum should be a constant. Form it into OP1, since the
7509 result we want will then be OP0 + OP1. */
7511 temp = simplify_binary_operation (PLUS, mode, constant_term,
7512 XEXP (op1, 1));
7513 if (temp != 0)
7514 op1 = temp;
7515 else
7516 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7519 /* Put a constant term last and put a multiplication first. */
7520 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7521 temp = op1, op1 = op0, op0 = temp;
7523 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7524 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7526 case MINUS_EXPR:
7527 /* For initializers, we are allowed to return a MINUS of two
7528 symbolic constants. Here we handle all cases when both operands
7529 are constant. */
7530 /* Handle difference of two symbolic constants,
7531 for the sake of an initializer. */
7532 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7533 && really_constant_p (TREE_OPERAND (exp, 0))
7534 && really_constant_p (TREE_OPERAND (exp, 1)))
7536 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7537 VOIDmode, ro_modifier);
7538 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7539 VOIDmode, ro_modifier);
7541 /* If the last operand is a CONST_INT, use plus_constant of
7542 the negated constant. Else make the MINUS. */
7543 if (GET_CODE (op1) == CONST_INT)
7544 return plus_constant (op0, - INTVAL (op1));
7545 else
7546 return gen_rtx_MINUS (mode, op0, op1);
7548 /* Convert A - const to A + (-const). */
7549 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7551 tree negated = fold (build1 (NEGATE_EXPR, type,
7552 TREE_OPERAND (exp, 1)));
7554 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7555 /* If we can't negate the constant in TYPE, leave it alone and
7556 expand_binop will negate it for us. We used to try to do it
7557 here in the signed version of TYPE, but that doesn't work
7558 on POINTER_TYPEs. */;
7559 else
7561 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7562 goto plus_expr;
7565 this_optab = ! unsignedp && flag_trapv
7566 && (GET_MODE_CLASS(mode) == MODE_INT)
7567 ? subv_optab : sub_optab;
7568 goto binop;
7570 case MULT_EXPR:
7571 /* If first operand is constant, swap them.
7572 Thus the following special case checks need only
7573 check the second operand. */
7574 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7576 register tree t1 = TREE_OPERAND (exp, 0);
7577 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7578 TREE_OPERAND (exp, 1) = t1;
7581 /* Attempt to return something suitable for generating an
7582 indexed address, for machines that support that. */
7584 if (modifier == EXPAND_SUM && mode == ptr_mode
7585 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7586 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7588 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7589 EXPAND_SUM);
7591 /* Apply distributive law if OP0 is x+c. */
7592 if (GET_CODE (op0) == PLUS
7593 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7594 return
7595 gen_rtx_PLUS
7596 (mode,
7597 gen_rtx_MULT
7598 (mode, XEXP (op0, 0),
7599 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7600 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7601 * INTVAL (XEXP (op0, 1))));
7603 if (GET_CODE (op0) != REG)
7604 op0 = force_operand (op0, NULL_RTX);
7605 if (GET_CODE (op0) != REG)
7606 op0 = copy_to_mode_reg (mode, op0);
7608 return
7609 gen_rtx_MULT (mode, op0,
7610 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7613 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7614 subtarget = 0;
7616 /* Check for multiplying things that have been extended
7617 from a narrower type. If this machine supports multiplying
7618 in that narrower type with a result in the desired type,
7619 do it that way, and avoid the explicit type-conversion. */
7620 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7621 && TREE_CODE (type) == INTEGER_TYPE
7622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7623 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7624 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7625 && int_fits_type_p (TREE_OPERAND (exp, 1),
7626 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7627 /* Don't use a widening multiply if a shift will do. */
7628 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7629 > HOST_BITS_PER_WIDE_INT)
7630 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7632 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7633 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7635 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7636 /* If both operands are extended, they must either both
7637 be zero-extended or both be sign-extended. */
7638 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7640 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7642 enum machine_mode innermode
7643 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7644 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7645 ? smul_widen_optab : umul_widen_optab);
7646 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7647 ? umul_widen_optab : smul_widen_optab);
7648 if (mode == GET_MODE_WIDER_MODE (innermode))
7650 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7652 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7653 NULL_RTX, VOIDmode, 0);
7654 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7655 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7656 VOIDmode, 0);
7657 else
7658 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7659 NULL_RTX, VOIDmode, 0);
7660 goto binop2;
7662 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7663 && innermode == word_mode)
7665 rtx htem;
7666 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7667 NULL_RTX, VOIDmode, 0);
7668 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7669 op1 = convert_modes (innermode, mode,
7670 expand_expr (TREE_OPERAND (exp, 1),
7671 NULL_RTX, VOIDmode, 0),
7672 unsignedp);
7673 else
7674 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7675 NULL_RTX, VOIDmode, 0);
7676 temp = expand_binop (mode, other_optab, op0, op1, target,
7677 unsignedp, OPTAB_LIB_WIDEN);
7678 htem = expand_mult_highpart_adjust (innermode,
7679 gen_highpart (innermode, temp),
7680 op0, op1,
7681 gen_highpart (innermode, temp),
7682 unsignedp);
7683 emit_move_insn (gen_highpart (innermode, temp), htem);
7684 return temp;
7688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7689 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7690 return expand_mult (mode, op0, op1, target, unsignedp);
7692 case TRUNC_DIV_EXPR:
7693 case FLOOR_DIV_EXPR:
7694 case CEIL_DIV_EXPR:
7695 case ROUND_DIV_EXPR:
7696 case EXACT_DIV_EXPR:
7697 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7698 subtarget = 0;
7699 /* Possible optimization: compute the dividend with EXPAND_SUM
7700 then if the divisor is constant can optimize the case
7701 where some terms of the dividend have coeffs divisible by it. */
7702 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7703 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7704 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7706 case RDIV_EXPR:
7707 this_optab = flodiv_optab;
7708 goto binop;
7710 case TRUNC_MOD_EXPR:
7711 case FLOOR_MOD_EXPR:
7712 case CEIL_MOD_EXPR:
7713 case ROUND_MOD_EXPR:
7714 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7715 subtarget = 0;
7716 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7717 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7718 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7720 case FIX_ROUND_EXPR:
7721 case FIX_FLOOR_EXPR:
7722 case FIX_CEIL_EXPR:
7723 abort (); /* Not used for C. */
7725 case FIX_TRUNC_EXPR:
7726 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7727 if (target == 0)
7728 target = gen_reg_rtx (mode);
7729 expand_fix (target, op0, unsignedp);
7730 return target;
7732 case FLOAT_EXPR:
7733 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7734 if (target == 0)
7735 target = gen_reg_rtx (mode);
7736 /* expand_float can't figure out what to do if FROM has VOIDmode.
7737 So give it the correct mode. With -O, cse will optimize this. */
7738 if (GET_MODE (op0) == VOIDmode)
7739 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7740 op0);
7741 expand_float (target, op0,
7742 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7743 return target;
7745 case NEGATE_EXPR:
7746 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7747 temp = expand_unop (mode,
7748 ! unsignedp && flag_trapv
7749 && (GET_MODE_CLASS(mode) == MODE_INT)
7750 ? negv_optab : neg_optab, op0, target, 0);
7751 if (temp == 0)
7752 abort ();
7753 return temp;
7755 case ABS_EXPR:
7756 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7758 /* Handle complex values specially. */
7759 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7760 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7761 return expand_complex_abs (mode, op0, target, unsignedp);
7763 /* Unsigned abs is simply the operand. Testing here means we don't
7764 risk generating incorrect code below. */
7765 if (TREE_UNSIGNED (type))
7766 return op0;
7768 return expand_abs (mode, op0, target, unsignedp,
7769 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7771 case MAX_EXPR:
7772 case MIN_EXPR:
7773 target = original_target;
7774 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7775 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7776 || GET_MODE (target) != mode
7777 || (GET_CODE (target) == REG
7778 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7779 target = gen_reg_rtx (mode);
7780 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7781 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7783 /* First try to do it with a special MIN or MAX instruction.
7784 If that does not win, use a conditional jump to select the proper
7785 value. */
7786 this_optab = (TREE_UNSIGNED (type)
7787 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7788 : (code == MIN_EXPR ? smin_optab : smax_optab));
7790 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7791 OPTAB_WIDEN);
7792 if (temp != 0)
7793 return temp;
7795 /* At this point, a MEM target is no longer useful; we will get better
7796 code without it. */
7798 if (GET_CODE (target) == MEM)
7799 target = gen_reg_rtx (mode);
7801 if (target != op0)
7802 emit_move_insn (target, op0);
7804 op0 = gen_label_rtx ();
7806 /* If this mode is an integer too wide to compare properly,
7807 compare word by word. Rely on cse to optimize constant cases. */
7808 if (GET_MODE_CLASS (mode) == MODE_INT
7809 && ! can_compare_p (GE, mode, ccp_jump))
7811 if (code == MAX_EXPR)
7812 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7813 target, op1, NULL_RTX, op0);
7814 else
7815 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7816 op1, target, NULL_RTX, op0);
7818 else
7820 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7821 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7822 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7823 op0);
7825 emit_move_insn (target, op1);
7826 emit_label (op0);
7827 return target;
7829 case BIT_NOT_EXPR:
7830 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7831 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7832 if (temp == 0)
7833 abort ();
7834 return temp;
7836 case FFS_EXPR:
7837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7838 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7839 if (temp == 0)
7840 abort ();
7841 return temp;
7843 /* ??? Can optimize bitwise operations with one arg constant.
7844 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7845 and (a bitwise1 b) bitwise2 b (etc)
7846 but that is probably not worth while. */
7848 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7849 boolean values when we want in all cases to compute both of them. In
7850 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7851 as actual zero-or-1 values and then bitwise anding. In cases where
7852 there cannot be any side effects, better code would be made by
7853 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7854 how to recognize those cases. */
7856 case TRUTH_AND_EXPR:
7857 case BIT_AND_EXPR:
7858 this_optab = and_optab;
7859 goto binop;
7861 case TRUTH_OR_EXPR:
7862 case BIT_IOR_EXPR:
7863 this_optab = ior_optab;
7864 goto binop;
7866 case TRUTH_XOR_EXPR:
7867 case BIT_XOR_EXPR:
7868 this_optab = xor_optab;
7869 goto binop;
7871 case LSHIFT_EXPR:
7872 case RSHIFT_EXPR:
7873 case LROTATE_EXPR:
7874 case RROTATE_EXPR:
7875 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7876 subtarget = 0;
7877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7878 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7879 unsignedp);
7881 /* Could determine the answer when only additive constants differ. Also,
7882 the addition of one can be handled by changing the condition. */
7883 case LT_EXPR:
7884 case LE_EXPR:
7885 case GT_EXPR:
7886 case GE_EXPR:
7887 case EQ_EXPR:
7888 case NE_EXPR:
7889 case UNORDERED_EXPR:
7890 case ORDERED_EXPR:
7891 case UNLT_EXPR:
7892 case UNLE_EXPR:
7893 case UNGT_EXPR:
7894 case UNGE_EXPR:
7895 case UNEQ_EXPR:
7896 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7897 if (temp != 0)
7898 return temp;
7900 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7901 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7902 && original_target
7903 && GET_CODE (original_target) == REG
7904 && (GET_MODE (original_target)
7905 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7907 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7908 VOIDmode, 0);
7910 if (temp != original_target)
7911 temp = copy_to_reg (temp);
7913 op1 = gen_label_rtx ();
7914 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7915 GET_MODE (temp), unsignedp, 0, op1);
7916 emit_move_insn (temp, const1_rtx);
7917 emit_label (op1);
7918 return temp;
7921 /* If no set-flag instruction, must generate a conditional
7922 store into a temporary variable. Drop through
7923 and handle this like && and ||. */
7925 case TRUTH_ANDIF_EXPR:
7926 case TRUTH_ORIF_EXPR:
7927 if (! ignore
7928 && (target == 0 || ! safe_from_p (target, exp, 1)
7929 /* Make sure we don't have a hard reg (such as function's return
7930 value) live across basic blocks, if not optimizing. */
7931 || (!optimize && GET_CODE (target) == REG
7932 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7933 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7935 if (target)
7936 emit_clr_insn (target);
7938 op1 = gen_label_rtx ();
7939 jumpifnot (exp, op1);
7941 if (target)
7942 emit_0_to_1_insn (target);
7944 emit_label (op1);
7945 return ignore ? const0_rtx : target;
7947 case TRUTH_NOT_EXPR:
7948 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7949 /* The parser is careful to generate TRUTH_NOT_EXPR
7950 only with operands that are always zero or one. */
7951 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7952 target, 1, OPTAB_LIB_WIDEN);
7953 if (temp == 0)
7954 abort ();
7955 return temp;
7957 case COMPOUND_EXPR:
7958 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7959 emit_queue ();
7960 return expand_expr (TREE_OPERAND (exp, 1),
7961 (ignore ? const0_rtx : target),
7962 VOIDmode, 0);
7964 case COND_EXPR:
7965 /* If we would have a "singleton" (see below) were it not for a
7966 conversion in each arm, bring that conversion back out. */
7967 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7968 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7969 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7970 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7972 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7973 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7975 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7976 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7977 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7978 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7979 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7980 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7981 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7982 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7983 return expand_expr (build1 (NOP_EXPR, type,
7984 build (COND_EXPR, TREE_TYPE (iftrue),
7985 TREE_OPERAND (exp, 0),
7986 iftrue, iffalse)),
7987 target, tmode, modifier);
7991 /* Note that COND_EXPRs whose type is a structure or union
7992 are required to be constructed to contain assignments of
7993 a temporary variable, so that we can evaluate them here
7994 for side effect only. If type is void, we must do likewise. */
7996 /* If an arm of the branch requires a cleanup,
7997 only that cleanup is performed. */
7999 tree singleton = 0;
8000 tree binary_op = 0, unary_op = 0;
8002 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8003 convert it to our mode, if necessary. */
8004 if (integer_onep (TREE_OPERAND (exp, 1))
8005 && integer_zerop (TREE_OPERAND (exp, 2))
8006 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8008 if (ignore)
8010 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8011 ro_modifier);
8012 return const0_rtx;
8015 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8016 if (GET_MODE (op0) == mode)
8017 return op0;
8019 if (target == 0)
8020 target = gen_reg_rtx (mode);
8021 convert_move (target, op0, unsignedp);
8022 return target;
8025 /* Check for X ? A + B : A. If we have this, we can copy A to the
8026 output and conditionally add B. Similarly for unary operations.
8027 Don't do this if X has side-effects because those side effects
8028 might affect A or B and the "?" operation is a sequence point in
8029 ANSI. (operand_equal_p tests for side effects.) */
8031 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8032 && operand_equal_p (TREE_OPERAND (exp, 2),
8033 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8034 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8035 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8036 && operand_equal_p (TREE_OPERAND (exp, 1),
8037 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8038 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8039 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8040 && operand_equal_p (TREE_OPERAND (exp, 2),
8041 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8042 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8043 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8044 && operand_equal_p (TREE_OPERAND (exp, 1),
8045 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8046 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8048 /* If we are not to produce a result, we have no target. Otherwise,
8049 if a target was specified use it; it will not be used as an
8050 intermediate target unless it is safe. If no target, use a
8051 temporary. */
8053 if (ignore)
8054 temp = 0;
8055 else if (original_target
8056 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8057 || (singleton && GET_CODE (original_target) == REG
8058 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8059 && original_target == var_rtx (singleton)))
8060 && GET_MODE (original_target) == mode
8061 #ifdef HAVE_conditional_move
8062 && (! can_conditionally_move_p (mode)
8063 || GET_CODE (original_target) == REG
8064 || TREE_ADDRESSABLE (type))
8065 #endif
8066 && ! (GET_CODE (original_target) == MEM
8067 && MEM_VOLATILE_P (original_target)))
8068 temp = original_target;
8069 else if (TREE_ADDRESSABLE (type))
8070 abort ();
8071 else
8072 temp = assign_temp (type, 0, 0, 1);
8074 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8075 do the test of X as a store-flag operation, do this as
8076 A + ((X != 0) << log C). Similarly for other simple binary
8077 operators. Only do for C == 1 if BRANCH_COST is low. */
8078 if (temp && singleton && binary_op
8079 && (TREE_CODE (binary_op) == PLUS_EXPR
8080 || TREE_CODE (binary_op) == MINUS_EXPR
8081 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8082 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8083 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8084 : integer_onep (TREE_OPERAND (binary_op, 1)))
8085 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8087 rtx result;
8088 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8089 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8090 ? addv_optab : add_optab)
8091 : TREE_CODE (binary_op) == MINUS_EXPR
8092 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8093 ? subv_optab : sub_optab)
8094 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8095 : xor_optab);
8097 /* If we had X ? A : A + 1, do this as A + (X == 0).
8099 We have to invert the truth value here and then put it
8100 back later if do_store_flag fails. We cannot simply copy
8101 TREE_OPERAND (exp, 0) to another variable and modify that
8102 because invert_truthvalue can modify the tree pointed to
8103 by its argument. */
8104 if (singleton == TREE_OPERAND (exp, 1))
8105 TREE_OPERAND (exp, 0)
8106 = invert_truthvalue (TREE_OPERAND (exp, 0));
8108 result = do_store_flag (TREE_OPERAND (exp, 0),
8109 (safe_from_p (temp, singleton, 1)
8110 ? temp : NULL_RTX),
8111 mode, BRANCH_COST <= 1);
8113 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8114 result = expand_shift (LSHIFT_EXPR, mode, result,
8115 build_int_2 (tree_log2
8116 (TREE_OPERAND
8117 (binary_op, 1)),
8119 (safe_from_p (temp, singleton, 1)
8120 ? temp : NULL_RTX), 0);
8122 if (result)
8124 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8125 return expand_binop (mode, boptab, op1, result, temp,
8126 unsignedp, OPTAB_LIB_WIDEN);
8128 else if (singleton == TREE_OPERAND (exp, 1))
8129 TREE_OPERAND (exp, 0)
8130 = invert_truthvalue (TREE_OPERAND (exp, 0));
8133 do_pending_stack_adjust ();
8134 NO_DEFER_POP;
8135 op0 = gen_label_rtx ();
8137 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8139 if (temp != 0)
8141 /* If the target conflicts with the other operand of the
8142 binary op, we can't use it. Also, we can't use the target
8143 if it is a hard register, because evaluating the condition
8144 might clobber it. */
8145 if ((binary_op
8146 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8147 || (GET_CODE (temp) == REG
8148 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8149 temp = gen_reg_rtx (mode);
8150 store_expr (singleton, temp, 0);
8152 else
8153 expand_expr (singleton,
8154 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8155 if (singleton == TREE_OPERAND (exp, 1))
8156 jumpif (TREE_OPERAND (exp, 0), op0);
8157 else
8158 jumpifnot (TREE_OPERAND (exp, 0), op0);
8160 start_cleanup_deferral ();
8161 if (binary_op && temp == 0)
8162 /* Just touch the other operand. */
8163 expand_expr (TREE_OPERAND (binary_op, 1),
8164 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8165 else if (binary_op)
8166 store_expr (build (TREE_CODE (binary_op), type,
8167 make_tree (type, temp),
8168 TREE_OPERAND (binary_op, 1)),
8169 temp, 0);
8170 else
8171 store_expr (build1 (TREE_CODE (unary_op), type,
8172 make_tree (type, temp)),
8173 temp, 0);
8174 op1 = op0;
8176 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8177 comparison operator. If we have one of these cases, set the
8178 output to A, branch on A (cse will merge these two references),
8179 then set the output to FOO. */
8180 else if (temp
8181 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8182 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8183 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8184 TREE_OPERAND (exp, 1), 0)
8185 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8186 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8187 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8189 if (GET_CODE (temp) == REG
8190 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8191 temp = gen_reg_rtx (mode);
8192 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8193 jumpif (TREE_OPERAND (exp, 0), op0);
8195 start_cleanup_deferral ();
8196 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8197 op1 = op0;
8199 else if (temp
8200 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8201 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8202 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8203 TREE_OPERAND (exp, 2), 0)
8204 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8205 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8206 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8208 if (GET_CODE (temp) == REG
8209 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8210 temp = gen_reg_rtx (mode);
8211 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8212 jumpifnot (TREE_OPERAND (exp, 0), op0);
8214 start_cleanup_deferral ();
8215 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8216 op1 = op0;
8218 else
8220 op1 = gen_label_rtx ();
8221 jumpifnot (TREE_OPERAND (exp, 0), op0);
8223 start_cleanup_deferral ();
8225 /* One branch of the cond can be void, if it never returns. For
8226 example A ? throw : E */
8227 if (temp != 0
8228 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8229 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8230 else
8231 expand_expr (TREE_OPERAND (exp, 1),
8232 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8233 end_cleanup_deferral ();
8234 emit_queue ();
8235 emit_jump_insn (gen_jump (op1));
8236 emit_barrier ();
8237 emit_label (op0);
8238 start_cleanup_deferral ();
8239 if (temp != 0
8240 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8241 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8242 else
8243 expand_expr (TREE_OPERAND (exp, 2),
8244 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8247 end_cleanup_deferral ();
8249 emit_queue ();
8250 emit_label (op1);
8251 OK_DEFER_POP;
8253 return temp;
8256 case TARGET_EXPR:
8258 /* Something needs to be initialized, but we didn't know
8259 where that thing was when building the tree. For example,
8260 it could be the return value of a function, or a parameter
8261 to a function which lays down in the stack, or a temporary
8262 variable which must be passed by reference.
8264 We guarantee that the expression will either be constructed
8265 or copied into our original target. */
8267 tree slot = TREE_OPERAND (exp, 0);
8268 tree cleanups = NULL_TREE;
8269 tree exp1;
8271 if (TREE_CODE (slot) != VAR_DECL)
8272 abort ();
8274 if (! ignore)
8275 target = original_target;
8277 /* Set this here so that if we get a target that refers to a
8278 register variable that's already been used, put_reg_into_stack
8279 knows that it should fix up those uses. */
8280 TREE_USED (slot) = 1;
8282 if (target == 0)
8284 if (DECL_RTL_SET_P (slot))
8286 target = DECL_RTL (slot);
8287 /* If we have already expanded the slot, so don't do
8288 it again. (mrs) */
8289 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8290 return target;
8292 else
8294 target = assign_temp (type, 2, 0, 1);
8295 /* All temp slots at this level must not conflict. */
8296 preserve_temp_slots (target);
8297 SET_DECL_RTL (slot, target);
8298 if (TREE_ADDRESSABLE (slot))
8299 put_var_into_stack (slot);
8301 /* Since SLOT is not known to the called function
8302 to belong to its stack frame, we must build an explicit
8303 cleanup. This case occurs when we must build up a reference
8304 to pass the reference as an argument. In this case,
8305 it is very likely that such a reference need not be
8306 built here. */
8308 if (TREE_OPERAND (exp, 2) == 0)
8309 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8310 cleanups = TREE_OPERAND (exp, 2);
8313 else
8315 /* This case does occur, when expanding a parameter which
8316 needs to be constructed on the stack. The target
8317 is the actual stack address that we want to initialize.
8318 The function we call will perform the cleanup in this case. */
8320 /* If we have already assigned it space, use that space,
8321 not target that we were passed in, as our target
8322 parameter is only a hint. */
8323 if (DECL_RTL_SET_P (slot))
8325 target = DECL_RTL (slot);
8326 /* If we have already expanded the slot, so don't do
8327 it again. (mrs) */
8328 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8329 return target;
8331 else
8333 SET_DECL_RTL (slot, target);
8334 /* If we must have an addressable slot, then make sure that
8335 the RTL that we just stored in slot is OK. */
8336 if (TREE_ADDRESSABLE (slot))
8337 put_var_into_stack (slot);
8341 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8342 /* Mark it as expanded. */
8343 TREE_OPERAND (exp, 1) = NULL_TREE;
8345 store_expr (exp1, target, 0);
8347 expand_decl_cleanup (NULL_TREE, cleanups);
8349 return target;
8352 case INIT_EXPR:
8354 tree lhs = TREE_OPERAND (exp, 0);
8355 tree rhs = TREE_OPERAND (exp, 1);
8356 tree noncopied_parts = 0;
8357 tree lhs_type = TREE_TYPE (lhs);
8359 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8360 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8361 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8362 TYPE_NONCOPIED_PARTS (lhs_type));
8363 while (noncopied_parts != 0)
8365 expand_assignment (TREE_VALUE (noncopied_parts),
8366 TREE_PURPOSE (noncopied_parts), 0, 0);
8367 noncopied_parts = TREE_CHAIN (noncopied_parts);
8369 return temp;
8372 case MODIFY_EXPR:
8374 /* If lhs is complex, expand calls in rhs before computing it.
8375 That's so we don't compute a pointer and save it over a call.
8376 If lhs is simple, compute it first so we can give it as a
8377 target if the rhs is just a call. This avoids an extra temp and copy
8378 and that prevents a partial-subsumption which makes bad code.
8379 Actually we could treat component_ref's of vars like vars. */
8381 tree lhs = TREE_OPERAND (exp, 0);
8382 tree rhs = TREE_OPERAND (exp, 1);
8383 tree noncopied_parts = 0;
8384 tree lhs_type = TREE_TYPE (lhs);
8386 temp = 0;
8388 /* Check for |= or &= of a bitfield of size one into another bitfield
8389 of size 1. In this case, (unless we need the result of the
8390 assignment) we can do this more efficiently with a
8391 test followed by an assignment, if necessary.
8393 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8394 things change so we do, this code should be enhanced to
8395 support it. */
8396 if (ignore
8397 && TREE_CODE (lhs) == COMPONENT_REF
8398 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8399 || TREE_CODE (rhs) == BIT_AND_EXPR)
8400 && TREE_OPERAND (rhs, 0) == lhs
8401 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8402 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8403 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8405 rtx label = gen_label_rtx ();
8407 do_jump (TREE_OPERAND (rhs, 1),
8408 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8409 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8410 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8411 (TREE_CODE (rhs) == BIT_IOR_EXPR
8412 ? integer_one_node
8413 : integer_zero_node)),
8414 0, 0);
8415 do_pending_stack_adjust ();
8416 emit_label (label);
8417 return const0_rtx;
8420 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8421 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8422 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8423 TYPE_NONCOPIED_PARTS (lhs_type));
8425 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8426 while (noncopied_parts != 0)
8428 expand_assignment (TREE_PURPOSE (noncopied_parts),
8429 TREE_VALUE (noncopied_parts), 0, 0);
8430 noncopied_parts = TREE_CHAIN (noncopied_parts);
8432 return temp;
8435 case RETURN_EXPR:
8436 if (!TREE_OPERAND (exp, 0))
8437 expand_null_return ();
8438 else
8439 expand_return (TREE_OPERAND (exp, 0));
8440 return const0_rtx;
8442 case PREINCREMENT_EXPR:
8443 case PREDECREMENT_EXPR:
8444 return expand_increment (exp, 0, ignore);
8446 case POSTINCREMENT_EXPR:
8447 case POSTDECREMENT_EXPR:
8448 /* Faster to treat as pre-increment if result is not used. */
8449 return expand_increment (exp, ! ignore, ignore);
8451 case ADDR_EXPR:
8452 /* If nonzero, TEMP will be set to the address of something that might
8453 be a MEM corresponding to a stack slot. */
8454 temp = 0;
8456 /* Are we taking the address of a nested function? */
8457 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8458 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8459 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8460 && ! TREE_STATIC (exp))
8462 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8463 op0 = force_operand (op0, target);
8465 /* If we are taking the address of something erroneous, just
8466 return a zero. */
8467 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8468 return const0_rtx;
8469 else
8471 /* We make sure to pass const0_rtx down if we came in with
8472 ignore set, to avoid doing the cleanups twice for something. */
8473 op0 = expand_expr (TREE_OPERAND (exp, 0),
8474 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8475 (modifier == EXPAND_INITIALIZER
8476 ? modifier : EXPAND_CONST_ADDRESS));
8478 /* If we are going to ignore the result, OP0 will have been set
8479 to const0_rtx, so just return it. Don't get confused and
8480 think we are taking the address of the constant. */
8481 if (ignore)
8482 return op0;
8484 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8485 clever and returns a REG when given a MEM. */
8486 op0 = protect_from_queue (op0, 1);
8488 /* We would like the object in memory. If it is a constant, we can
8489 have it be statically allocated into memory. For a non-constant,
8490 we need to allocate some memory and store the value into it. */
8492 if (CONSTANT_P (op0))
8493 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8494 op0);
8495 else if (GET_CODE (op0) == MEM)
8497 mark_temp_addr_taken (op0);
8498 temp = XEXP (op0, 0);
8501 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8502 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8503 || GET_CODE (op0) == PARALLEL)
8505 /* If this object is in a register, it must be not
8506 be BLKmode. */
8507 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8508 tree nt = build_qualified_type (inner_type,
8509 (TYPE_QUALS (inner_type)
8510 | TYPE_QUAL_CONST));
8511 rtx memloc = assign_temp (nt, 1, 1, 1);
8513 mark_temp_addr_taken (memloc);
8514 if (GET_CODE (op0) == PARALLEL)
8515 /* Handle calls that pass values in multiple non-contiguous
8516 locations. The Irix 6 ABI has examples of this. */
8517 emit_group_store (memloc, op0,
8518 int_size_in_bytes (inner_type),
8519 TYPE_ALIGN (inner_type));
8520 else
8521 emit_move_insn (memloc, op0);
8522 op0 = memloc;
8525 if (GET_CODE (op0) != MEM)
8526 abort ();
8528 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8530 temp = XEXP (op0, 0);
8531 #ifdef POINTERS_EXTEND_UNSIGNED
8532 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8533 && mode == ptr_mode)
8534 temp = convert_memory_address (ptr_mode, temp);
8535 #endif
8536 return temp;
8539 op0 = force_operand (XEXP (op0, 0), target);
8542 if (flag_force_addr && GET_CODE (op0) != REG)
8543 op0 = force_reg (Pmode, op0);
8545 if (GET_CODE (op0) == REG
8546 && ! REG_USERVAR_P (op0))
8547 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8549 /* If we might have had a temp slot, add an equivalent address
8550 for it. */
8551 if (temp != 0)
8552 update_temp_slot_address (temp, op0);
8554 #ifdef POINTERS_EXTEND_UNSIGNED
8555 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8556 && mode == ptr_mode)
8557 op0 = convert_memory_address (ptr_mode, op0);
8558 #endif
8560 return op0;
8562 case ENTRY_VALUE_EXPR:
8563 abort ();
8565 /* COMPLEX type for Extended Pascal & Fortran */
8566 case COMPLEX_EXPR:
8568 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8569 rtx insns;
8571 /* Get the rtx code of the operands. */
8572 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8573 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8575 if (! target)
8576 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8578 start_sequence ();
8580 /* Move the real (op0) and imaginary (op1) parts to their location. */
8581 emit_move_insn (gen_realpart (mode, target), op0);
8582 emit_move_insn (gen_imagpart (mode, target), op1);
8584 insns = get_insns ();
8585 end_sequence ();
8587 /* Complex construction should appear as a single unit. */
8588 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8589 each with a separate pseudo as destination.
8590 It's not correct for flow to treat them as a unit. */
8591 if (GET_CODE (target) != CONCAT)
8592 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8593 else
8594 emit_insns (insns);
8596 return target;
8599 case REALPART_EXPR:
8600 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8601 return gen_realpart (mode, op0);
8603 case IMAGPART_EXPR:
8604 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8605 return gen_imagpart (mode, op0);
8607 case CONJ_EXPR:
8609 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8610 rtx imag_t;
8611 rtx insns;
8613 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8615 if (! target)
8616 target = gen_reg_rtx (mode);
8618 start_sequence ();
8620 /* Store the realpart and the negated imagpart to target. */
8621 emit_move_insn (gen_realpart (partmode, target),
8622 gen_realpart (partmode, op0));
8624 imag_t = gen_imagpart (partmode, target);
8625 temp = expand_unop (partmode,
8626 ! unsignedp && flag_trapv
8627 && (GET_MODE_CLASS(partmode) == MODE_INT)
8628 ? negv_optab : neg_optab,
8629 gen_imagpart (partmode, op0), imag_t, 0);
8630 if (temp != imag_t)
8631 emit_move_insn (imag_t, temp);
8633 insns = get_insns ();
8634 end_sequence ();
8636 /* Conjugate should appear as a single unit
8637 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8638 each with a separate pseudo as destination.
8639 It's not correct for flow to treat them as a unit. */
8640 if (GET_CODE (target) != CONCAT)
8641 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8642 else
8643 emit_insns (insns);
8645 return target;
8648 case TRY_CATCH_EXPR:
8650 tree handler = TREE_OPERAND (exp, 1);
8652 expand_eh_region_start ();
8654 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8656 expand_eh_region_end_cleanup (handler);
8658 return op0;
8661 case TRY_FINALLY_EXPR:
8663 tree try_block = TREE_OPERAND (exp, 0);
8664 tree finally_block = TREE_OPERAND (exp, 1);
8665 rtx finally_label = gen_label_rtx ();
8666 rtx done_label = gen_label_rtx ();
8667 rtx return_link = gen_reg_rtx (Pmode);
8668 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8669 (tree) finally_label, (tree) return_link);
8670 TREE_SIDE_EFFECTS (cleanup) = 1;
8672 /* Start a new binding layer that will keep track of all cleanup
8673 actions to be performed. */
8674 expand_start_bindings (2);
8676 target_temp_slot_level = temp_slot_level;
8678 expand_decl_cleanup (NULL_TREE, cleanup);
8679 op0 = expand_expr (try_block, target, tmode, modifier);
8681 preserve_temp_slots (op0);
8682 expand_end_bindings (NULL_TREE, 0, 0);
8683 emit_jump (done_label);
8684 emit_label (finally_label);
8685 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8686 emit_indirect_jump (return_link);
8687 emit_label (done_label);
8688 return op0;
8691 case GOTO_SUBROUTINE_EXPR:
8693 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8694 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8695 rtx return_address = gen_label_rtx ();
8696 emit_move_insn (return_link,
8697 gen_rtx_LABEL_REF (Pmode, return_address));
8698 emit_jump (subr);
8699 emit_label (return_address);
8700 return const0_rtx;
8703 case VA_ARG_EXPR:
8704 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8706 case EXC_PTR_EXPR:
8707 return get_exception_pointer (cfun);
8709 default:
8710 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8713 /* Here to do an ordinary binary operator, generating an instruction
8714 from the optab already placed in `this_optab'. */
8715 binop:
8716 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8717 subtarget = 0;
8718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8719 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8720 binop2:
8721 temp = expand_binop (mode, this_optab, op0, op1, target,
8722 unsignedp, OPTAB_LIB_WIDEN);
8723 if (temp == 0)
8724 abort ();
8725 return temp;
8728 /* Similar to expand_expr, except that we don't specify a target, target
8729 mode, or modifier and we return the alignment of the inner type. This is
8730 used in cases where it is not necessary to align the result to the
8731 alignment of its type as long as we know the alignment of the result, for
8732 example for comparisons of BLKmode values. */
8734 static rtx
8735 expand_expr_unaligned (exp, palign)
8736 register tree exp;
8737 unsigned int *palign;
8739 register rtx op0;
8740 tree type = TREE_TYPE (exp);
8741 register enum machine_mode mode = TYPE_MODE (type);
8743 /* Default the alignment we return to that of the type. */
8744 *palign = TYPE_ALIGN (type);
8746 /* The only cases in which we do anything special is if the resulting mode
8747 is BLKmode. */
8748 if (mode != BLKmode)
8749 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8751 switch (TREE_CODE (exp))
8753 case CONVERT_EXPR:
8754 case NOP_EXPR:
8755 case NON_LVALUE_EXPR:
8756 /* Conversions between BLKmode values don't change the underlying
8757 alignment or value. */
8758 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8759 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8760 break;
8762 case ARRAY_REF:
8763 /* Much of the code for this case is copied directly from expand_expr.
8764 We need to duplicate it here because we will do something different
8765 in the fall-through case, so we need to handle the same exceptions
8766 it does. */
8768 tree array = TREE_OPERAND (exp, 0);
8769 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8770 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8771 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8772 HOST_WIDE_INT i;
8774 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8775 abort ();
8777 /* Optimize the special-case of a zero lower bound.
8779 We convert the low_bound to sizetype to avoid some problems
8780 with constant folding. (E.g. suppose the lower bound is 1,
8781 and its mode is QI. Without the conversion, (ARRAY
8782 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8783 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8785 if (! integer_zerop (low_bound))
8786 index = size_diffop (index, convert (sizetype, low_bound));
8788 /* If this is a constant index into a constant array,
8789 just get the value from the array. Handle both the cases when
8790 we have an explicit constructor and when our operand is a variable
8791 that was declared const. */
8793 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8794 && host_integerp (index, 0)
8795 && 0 > compare_tree_int (index,
8796 list_length (CONSTRUCTOR_ELTS
8797 (TREE_OPERAND (exp, 0)))))
8799 tree elem;
8801 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8802 i = tree_low_cst (index, 0);
8803 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8806 if (elem)
8807 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8810 else if (optimize >= 1
8811 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8812 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8813 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8815 if (TREE_CODE (index) == INTEGER_CST)
8817 tree init = DECL_INITIAL (array);
8819 if (TREE_CODE (init) == CONSTRUCTOR)
8821 tree elem;
8823 for (elem = CONSTRUCTOR_ELTS (init);
8824 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8825 elem = TREE_CHAIN (elem))
8828 if (elem)
8829 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8830 palign);
8835 /* Fall through. */
8837 case COMPONENT_REF:
8838 case BIT_FIELD_REF:
8839 /* If the operand is a CONSTRUCTOR, we can just extract the
8840 appropriate field if it is present. Don't do this if we have
8841 already written the data since we want to refer to that copy
8842 and varasm.c assumes that's what we'll do. */
8843 if (TREE_CODE (exp) != ARRAY_REF
8844 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8845 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8847 tree elt;
8849 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8850 elt = TREE_CHAIN (elt))
8851 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8852 /* Note that unlike the case in expand_expr, we know this is
8853 BLKmode and hence not an integer. */
8854 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8858 enum machine_mode mode1;
8859 HOST_WIDE_INT bitsize, bitpos;
8860 tree offset;
8861 int volatilep = 0;
8862 unsigned int alignment;
8863 int unsignedp;
8864 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8865 &mode1, &unsignedp, &volatilep,
8866 &alignment);
8868 /* If we got back the original object, something is wrong. Perhaps
8869 we are evaluating an expression too early. In any event, don't
8870 infinitely recurse. */
8871 if (tem == exp)
8872 abort ();
8874 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8876 /* If this is a constant, put it into a register if it is a
8877 legitimate constant and OFFSET is 0 and memory if it isn't. */
8878 if (CONSTANT_P (op0))
8880 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8882 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8883 && offset == 0)
8884 op0 = force_reg (inner_mode, op0);
8885 else
8886 op0 = validize_mem (force_const_mem (inner_mode, op0));
8889 if (offset != 0)
8891 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8893 /* If this object is in a register, put it into memory.
8894 This case can't occur in C, but can in Ada if we have
8895 unchecked conversion of an expression from a scalar type to
8896 an array or record type. */
8897 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8898 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8900 tree nt = build_qualified_type (TREE_TYPE (tem),
8901 (TYPE_QUALS (TREE_TYPE (tem))
8902 | TYPE_QUAL_CONST));
8903 rtx memloc = assign_temp (nt, 1, 1, 1);
8905 mark_temp_addr_taken (memloc);
8906 emit_move_insn (memloc, op0);
8907 op0 = memloc;
8910 if (GET_CODE (op0) != MEM)
8911 abort ();
8913 if (GET_MODE (offset_rtx) != ptr_mode)
8915 #ifdef POINTERS_EXTEND_UNSIGNED
8916 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8917 #else
8918 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8919 #endif
8922 op0 = change_address (op0, VOIDmode,
8923 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8924 force_reg (ptr_mode,
8925 offset_rtx)));
8928 /* Don't forget about volatility even if this is a bitfield. */
8929 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8931 op0 = copy_rtx (op0);
8932 MEM_VOLATILE_P (op0) = 1;
8935 /* Check the access. */
8936 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8938 rtx to;
8939 int size;
8941 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8942 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8944 /* Check the access right of the pointer. */
8945 in_check_memory_usage = 1;
8946 if (size > BITS_PER_UNIT)
8947 emit_library_call (chkr_check_addr_libfunc,
8948 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8949 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8950 TYPE_MODE (sizetype),
8951 GEN_INT (MEMORY_USE_RO),
8952 TYPE_MODE (integer_type_node));
8953 in_check_memory_usage = 0;
8956 /* In cases where an aligned union has an unaligned object
8957 as a field, we might be extracting a BLKmode value from
8958 an integer-mode (e.g., SImode) object. Handle this case
8959 by doing the extract into an object as wide as the field
8960 (which we know to be the width of a basic mode), then
8961 storing into memory, and changing the mode to BLKmode.
8962 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8963 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8964 if (mode1 == VOIDmode
8965 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8966 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8967 && (TYPE_ALIGN (type) > alignment
8968 || bitpos % TYPE_ALIGN (type) != 0)))
8970 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8972 if (ext_mode == BLKmode)
8974 /* In this case, BITPOS must start at a byte boundary. */
8975 if (GET_CODE (op0) != MEM
8976 || bitpos % BITS_PER_UNIT != 0)
8977 abort ();
8979 op0 = change_address (op0, VOIDmode,
8980 plus_constant (XEXP (op0, 0),
8981 bitpos / BITS_PER_UNIT));
8983 else
8985 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8986 TYPE_QUAL_CONST);
8987 rtx new = assign_temp (nt, 0, 1, 1);
8989 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8990 unsignedp, NULL_RTX, ext_mode,
8991 ext_mode, alignment,
8992 int_size_in_bytes (TREE_TYPE (tem)));
8994 /* If the result is a record type and BITSIZE is narrower than
8995 the mode of OP0, an integral mode, and this is a big endian
8996 machine, we must put the field into the high-order bits. */
8997 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8998 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8999 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9000 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9001 size_int (GET_MODE_BITSIZE
9002 (GET_MODE (op0))
9003 - bitsize),
9004 op0, 1);
9006 emit_move_insn (new, op0);
9007 op0 = copy_rtx (new);
9008 PUT_MODE (op0, BLKmode);
9011 else
9012 /* Get a reference to just this component. */
9013 op0 = change_address (op0, mode1,
9014 plus_constant (XEXP (op0, 0),
9015 (bitpos / BITS_PER_UNIT)));
9017 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9019 /* Adjust the alignment in case the bit position is not
9020 a multiple of the alignment of the inner object. */
9021 while (bitpos % alignment != 0)
9022 alignment >>= 1;
9024 if (GET_CODE (XEXP (op0, 0)) == REG)
9025 mark_reg_pointer (XEXP (op0, 0), alignment);
9027 MEM_IN_STRUCT_P (op0) = 1;
9028 MEM_VOLATILE_P (op0) |= volatilep;
9030 *palign = alignment;
9031 return op0;
9034 default:
9035 break;
9039 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9042 /* Return the tree node if a ARG corresponds to a string constant or zero
9043 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9044 in bytes within the string that ARG is accessing. The type of the
9045 offset will be `sizetype'. */
9047 tree
9048 string_constant (arg, ptr_offset)
9049 tree arg;
9050 tree *ptr_offset;
9052 STRIP_NOPS (arg);
9054 if (TREE_CODE (arg) == ADDR_EXPR
9055 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9057 *ptr_offset = size_zero_node;
9058 return TREE_OPERAND (arg, 0);
9060 else if (TREE_CODE (arg) == PLUS_EXPR)
9062 tree arg0 = TREE_OPERAND (arg, 0);
9063 tree arg1 = TREE_OPERAND (arg, 1);
9065 STRIP_NOPS (arg0);
9066 STRIP_NOPS (arg1);
9068 if (TREE_CODE (arg0) == ADDR_EXPR
9069 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9071 *ptr_offset = convert (sizetype, arg1);
9072 return TREE_OPERAND (arg0, 0);
9074 else if (TREE_CODE (arg1) == ADDR_EXPR
9075 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9077 *ptr_offset = convert (sizetype, arg0);
9078 return TREE_OPERAND (arg1, 0);
9082 return 0;
9085 /* Expand code for a post- or pre- increment or decrement
9086 and return the RTX for the result.
9087 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9089 static rtx
9090 expand_increment (exp, post, ignore)
9091 register tree exp;
9092 int post, ignore;
9094 register rtx op0, op1;
9095 register rtx temp, value;
9096 register tree incremented = TREE_OPERAND (exp, 0);
9097 optab this_optab = add_optab;
9098 int icode;
9099 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9100 int op0_is_copy = 0;
9101 int single_insn = 0;
9102 /* 1 means we can't store into OP0 directly,
9103 because it is a subreg narrower than a word,
9104 and we don't dare clobber the rest of the word. */
9105 int bad_subreg = 0;
9107 /* Stabilize any component ref that might need to be
9108 evaluated more than once below. */
9109 if (!post
9110 || TREE_CODE (incremented) == BIT_FIELD_REF
9111 || (TREE_CODE (incremented) == COMPONENT_REF
9112 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9113 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9114 incremented = stabilize_reference (incremented);
9115 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9116 ones into save exprs so that they don't accidentally get evaluated
9117 more than once by the code below. */
9118 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9119 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9120 incremented = save_expr (incremented);
9122 /* Compute the operands as RTX.
9123 Note whether OP0 is the actual lvalue or a copy of it:
9124 I believe it is a copy iff it is a register or subreg
9125 and insns were generated in computing it. */
9127 temp = get_last_insn ();
9128 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9130 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9131 in place but instead must do sign- or zero-extension during assignment,
9132 so we copy it into a new register and let the code below use it as
9133 a copy.
9135 Note that we can safely modify this SUBREG since it is know not to be
9136 shared (it was made by the expand_expr call above). */
9138 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9140 if (post)
9141 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9142 else
9143 bad_subreg = 1;
9145 else if (GET_CODE (op0) == SUBREG
9146 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9148 /* We cannot increment this SUBREG in place. If we are
9149 post-incrementing, get a copy of the old value. Otherwise,
9150 just mark that we cannot increment in place. */
9151 if (post)
9152 op0 = copy_to_reg (op0);
9153 else
9154 bad_subreg = 1;
9157 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9158 && temp != get_last_insn ());
9159 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9160 EXPAND_MEMORY_USE_BAD);
9162 /* Decide whether incrementing or decrementing. */
9163 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9164 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9165 this_optab = sub_optab;
9167 /* Convert decrement by a constant into a negative increment. */
9168 if (this_optab == sub_optab
9169 && GET_CODE (op1) == CONST_INT)
9171 op1 = GEN_INT (-INTVAL (op1));
9172 this_optab = add_optab;
9175 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9176 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9178 /* For a preincrement, see if we can do this with a single instruction. */
9179 if (!post)
9181 icode = (int) this_optab->handlers[(int) mode].insn_code;
9182 if (icode != (int) CODE_FOR_nothing
9183 /* Make sure that OP0 is valid for operands 0 and 1
9184 of the insn we want to queue. */
9185 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9186 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9187 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9188 single_insn = 1;
9191 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9192 then we cannot just increment OP0. We must therefore contrive to
9193 increment the original value. Then, for postincrement, we can return
9194 OP0 since it is a copy of the old value. For preincrement, expand here
9195 unless we can do it with a single insn.
9197 Likewise if storing directly into OP0 would clobber high bits
9198 we need to preserve (bad_subreg). */
9199 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9201 /* This is the easiest way to increment the value wherever it is.
9202 Problems with multiple evaluation of INCREMENTED are prevented
9203 because either (1) it is a component_ref or preincrement,
9204 in which case it was stabilized above, or (2) it is an array_ref
9205 with constant index in an array in a register, which is
9206 safe to reevaluate. */
9207 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9208 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9209 ? MINUS_EXPR : PLUS_EXPR),
9210 TREE_TYPE (exp),
9211 incremented,
9212 TREE_OPERAND (exp, 1));
9214 while (TREE_CODE (incremented) == NOP_EXPR
9215 || TREE_CODE (incremented) == CONVERT_EXPR)
9217 newexp = convert (TREE_TYPE (incremented), newexp);
9218 incremented = TREE_OPERAND (incremented, 0);
9221 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9222 return post ? op0 : temp;
9225 if (post)
9227 /* We have a true reference to the value in OP0.
9228 If there is an insn to add or subtract in this mode, queue it.
9229 Queueing the increment insn avoids the register shuffling
9230 that often results if we must increment now and first save
9231 the old value for subsequent use. */
9233 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9234 op0 = stabilize (op0);
9235 #endif
9237 icode = (int) this_optab->handlers[(int) mode].insn_code;
9238 if (icode != (int) CODE_FOR_nothing
9239 /* Make sure that OP0 is valid for operands 0 and 1
9240 of the insn we want to queue. */
9241 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9242 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9244 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9245 op1 = force_reg (mode, op1);
9247 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9249 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9251 rtx addr = (general_operand (XEXP (op0, 0), mode)
9252 ? force_reg (Pmode, XEXP (op0, 0))
9253 : copy_to_reg (XEXP (op0, 0)));
9254 rtx temp, result;
9256 op0 = change_address (op0, VOIDmode, addr);
9257 temp = force_reg (GET_MODE (op0), op0);
9258 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9259 op1 = force_reg (mode, op1);
9261 /* The increment queue is LIFO, thus we have to `queue'
9262 the instructions in reverse order. */
9263 enqueue_insn (op0, gen_move_insn (op0, temp));
9264 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9265 return result;
9269 /* Preincrement, or we can't increment with one simple insn. */
9270 if (post)
9271 /* Save a copy of the value before inc or dec, to return it later. */
9272 temp = value = copy_to_reg (op0);
9273 else
9274 /* Arrange to return the incremented value. */
9275 /* Copy the rtx because expand_binop will protect from the queue,
9276 and the results of that would be invalid for us to return
9277 if our caller does emit_queue before using our result. */
9278 temp = copy_rtx (value = op0);
9280 /* Increment however we can. */
9281 op1 = expand_binop (mode, this_optab, value, op1,
9282 current_function_check_memory_usage ? NULL_RTX : op0,
9283 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9284 /* Make sure the value is stored into OP0. */
9285 if (op1 != op0)
9286 emit_move_insn (op0, op1);
9288 return temp;
9291 /* At the start of a function, record that we have no previously-pushed
9292 arguments waiting to be popped. */
9294 void
9295 init_pending_stack_adjust ()
9297 pending_stack_adjust = 0;
9300 /* When exiting from function, if safe, clear out any pending stack adjust
9301 so the adjustment won't get done.
9303 Note, if the current function calls alloca, then it must have a
9304 frame pointer regardless of the value of flag_omit_frame_pointer. */
9306 void
9307 clear_pending_stack_adjust ()
9309 #ifdef EXIT_IGNORE_STACK
9310 if (optimize > 0
9311 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9312 && EXIT_IGNORE_STACK
9313 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9314 && ! flag_inline_functions)
9316 stack_pointer_delta -= pending_stack_adjust,
9317 pending_stack_adjust = 0;
9319 #endif
9322 /* Pop any previously-pushed arguments that have not been popped yet. */
9324 void
9325 do_pending_stack_adjust ()
9327 if (inhibit_defer_pop == 0)
9329 if (pending_stack_adjust != 0)
9330 adjust_stack (GEN_INT (pending_stack_adjust));
9331 pending_stack_adjust = 0;
9335 /* Expand conditional expressions. */
9337 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9338 LABEL is an rtx of code CODE_LABEL, in this function and all the
9339 functions here. */
9341 void
9342 jumpifnot (exp, label)
9343 tree exp;
9344 rtx label;
9346 do_jump (exp, label, NULL_RTX);
9349 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9351 void
9352 jumpif (exp, label)
9353 tree exp;
9354 rtx label;
9356 do_jump (exp, NULL_RTX, label);
9359 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9360 the result is zero, or IF_TRUE_LABEL if the result is one.
9361 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9362 meaning fall through in that case.
9364 do_jump always does any pending stack adjust except when it does not
9365 actually perform a jump. An example where there is no jump
9366 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9368 This function is responsible for optimizing cases such as
9369 &&, || and comparison operators in EXP. */
9371 void
9372 do_jump (exp, if_false_label, if_true_label)
9373 tree exp;
9374 rtx if_false_label, if_true_label;
9376 register enum tree_code code = TREE_CODE (exp);
9377 /* Some cases need to create a label to jump to
9378 in order to properly fall through.
9379 These cases set DROP_THROUGH_LABEL nonzero. */
9380 rtx drop_through_label = 0;
9381 rtx temp;
9382 int i;
9383 tree type;
9384 enum machine_mode mode;
9386 #ifdef MAX_INTEGER_COMPUTATION_MODE
9387 check_max_integer_computation_mode (exp);
9388 #endif
9390 emit_queue ();
9392 switch (code)
9394 case ERROR_MARK:
9395 break;
9397 case INTEGER_CST:
9398 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9399 if (temp)
9400 emit_jump (temp);
9401 break;
9403 #if 0
9404 /* This is not true with #pragma weak */
9405 case ADDR_EXPR:
9406 /* The address of something can never be zero. */
9407 if (if_true_label)
9408 emit_jump (if_true_label);
9409 break;
9410 #endif
9412 case NOP_EXPR:
9413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9414 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9415 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9416 goto normal;
9417 case CONVERT_EXPR:
9418 /* If we are narrowing the operand, we have to do the compare in the
9419 narrower mode. */
9420 if ((TYPE_PRECISION (TREE_TYPE (exp))
9421 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9422 goto normal;
9423 case NON_LVALUE_EXPR:
9424 case REFERENCE_EXPR:
9425 case ABS_EXPR:
9426 case NEGATE_EXPR:
9427 case LROTATE_EXPR:
9428 case RROTATE_EXPR:
9429 /* These cannot change zero->non-zero or vice versa. */
9430 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9431 break;
9433 case WITH_RECORD_EXPR:
9434 /* Put the object on the placeholder list, recurse through our first
9435 operand, and pop the list. */
9436 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9437 placeholder_list);
9438 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9439 placeholder_list = TREE_CHAIN (placeholder_list);
9440 break;
9442 #if 0
9443 /* This is never less insns than evaluating the PLUS_EXPR followed by
9444 a test and can be longer if the test is eliminated. */
9445 case PLUS_EXPR:
9446 /* Reduce to minus. */
9447 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9448 TREE_OPERAND (exp, 0),
9449 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9450 TREE_OPERAND (exp, 1))));
9451 /* Process as MINUS. */
9452 #endif
9454 case MINUS_EXPR:
9455 /* Non-zero iff operands of minus differ. */
9456 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9457 TREE_OPERAND (exp, 0),
9458 TREE_OPERAND (exp, 1)),
9459 NE, NE, if_false_label, if_true_label);
9460 break;
9462 case BIT_AND_EXPR:
9463 /* If we are AND'ing with a small constant, do this comparison in the
9464 smallest type that fits. If the machine doesn't have comparisons
9465 that small, it will be converted back to the wider comparison.
9466 This helps if we are testing the sign bit of a narrower object.
9467 combine can't do this for us because it can't know whether a
9468 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9470 if (! SLOW_BYTE_ACCESS
9471 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9472 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9473 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9474 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9475 && (type = type_for_mode (mode, 1)) != 0
9476 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9477 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9478 != CODE_FOR_nothing))
9480 do_jump (convert (type, exp), if_false_label, if_true_label);
9481 break;
9483 goto normal;
9485 case TRUTH_NOT_EXPR:
9486 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9487 break;
9489 case TRUTH_ANDIF_EXPR:
9490 if (if_false_label == 0)
9491 if_false_label = drop_through_label = gen_label_rtx ();
9492 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9493 start_cleanup_deferral ();
9494 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9495 end_cleanup_deferral ();
9496 break;
9498 case TRUTH_ORIF_EXPR:
9499 if (if_true_label == 0)
9500 if_true_label = drop_through_label = gen_label_rtx ();
9501 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9502 start_cleanup_deferral ();
9503 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9504 end_cleanup_deferral ();
9505 break;
9507 case COMPOUND_EXPR:
9508 push_temp_slots ();
9509 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9510 preserve_temp_slots (NULL_RTX);
9511 free_temp_slots ();
9512 pop_temp_slots ();
9513 emit_queue ();
9514 do_pending_stack_adjust ();
9515 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9516 break;
9518 case COMPONENT_REF:
9519 case BIT_FIELD_REF:
9520 case ARRAY_REF:
9522 HOST_WIDE_INT bitsize, bitpos;
9523 int unsignedp;
9524 enum machine_mode mode;
9525 tree type;
9526 tree offset;
9527 int volatilep = 0;
9528 unsigned int alignment;
9530 /* Get description of this reference. We don't actually care
9531 about the underlying object here. */
9532 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9533 &unsignedp, &volatilep, &alignment);
9535 type = type_for_size (bitsize, unsignedp);
9536 if (! SLOW_BYTE_ACCESS
9537 && type != 0 && bitsize >= 0
9538 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9539 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9540 != CODE_FOR_nothing))
9542 do_jump (convert (type, exp), if_false_label, if_true_label);
9543 break;
9545 goto normal;
9548 case COND_EXPR:
9549 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9550 if (integer_onep (TREE_OPERAND (exp, 1))
9551 && integer_zerop (TREE_OPERAND (exp, 2)))
9552 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9554 else if (integer_zerop (TREE_OPERAND (exp, 1))
9555 && integer_onep (TREE_OPERAND (exp, 2)))
9556 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9558 else
9560 register rtx label1 = gen_label_rtx ();
9561 drop_through_label = gen_label_rtx ();
9563 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9565 start_cleanup_deferral ();
9566 /* Now the THEN-expression. */
9567 do_jump (TREE_OPERAND (exp, 1),
9568 if_false_label ? if_false_label : drop_through_label,
9569 if_true_label ? if_true_label : drop_through_label);
9570 /* In case the do_jump just above never jumps. */
9571 do_pending_stack_adjust ();
9572 emit_label (label1);
9574 /* Now the ELSE-expression. */
9575 do_jump (TREE_OPERAND (exp, 2),
9576 if_false_label ? if_false_label : drop_through_label,
9577 if_true_label ? if_true_label : drop_through_label);
9578 end_cleanup_deferral ();
9580 break;
9582 case EQ_EXPR:
9584 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9586 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9587 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9589 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9590 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9591 do_jump
9592 (fold
9593 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9594 fold (build (EQ_EXPR, TREE_TYPE (exp),
9595 fold (build1 (REALPART_EXPR,
9596 TREE_TYPE (inner_type),
9597 exp0)),
9598 fold (build1 (REALPART_EXPR,
9599 TREE_TYPE (inner_type),
9600 exp1)))),
9601 fold (build (EQ_EXPR, TREE_TYPE (exp),
9602 fold (build1 (IMAGPART_EXPR,
9603 TREE_TYPE (inner_type),
9604 exp0)),
9605 fold (build1 (IMAGPART_EXPR,
9606 TREE_TYPE (inner_type),
9607 exp1)))))),
9608 if_false_label, if_true_label);
9611 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9612 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9614 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9615 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9616 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9617 else
9618 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9619 break;
9622 case NE_EXPR:
9624 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9626 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9627 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9629 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9630 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9631 do_jump
9632 (fold
9633 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9634 fold (build (NE_EXPR, TREE_TYPE (exp),
9635 fold (build1 (REALPART_EXPR,
9636 TREE_TYPE (inner_type),
9637 exp0)),
9638 fold (build1 (REALPART_EXPR,
9639 TREE_TYPE (inner_type),
9640 exp1)))),
9641 fold (build (NE_EXPR, TREE_TYPE (exp),
9642 fold (build1 (IMAGPART_EXPR,
9643 TREE_TYPE (inner_type),
9644 exp0)),
9645 fold (build1 (IMAGPART_EXPR,
9646 TREE_TYPE (inner_type),
9647 exp1)))))),
9648 if_false_label, if_true_label);
9651 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9652 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9654 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9655 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9656 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9657 else
9658 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9659 break;
9662 case LT_EXPR:
9663 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9664 if (GET_MODE_CLASS (mode) == MODE_INT
9665 && ! can_compare_p (LT, mode, ccp_jump))
9666 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9667 else
9668 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9669 break;
9671 case LE_EXPR:
9672 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9673 if (GET_MODE_CLASS (mode) == MODE_INT
9674 && ! can_compare_p (LE, mode, ccp_jump))
9675 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9676 else
9677 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9678 break;
9680 case GT_EXPR:
9681 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9682 if (GET_MODE_CLASS (mode) == MODE_INT
9683 && ! can_compare_p (GT, mode, ccp_jump))
9684 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9685 else
9686 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9687 break;
9689 case GE_EXPR:
9690 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9691 if (GET_MODE_CLASS (mode) == MODE_INT
9692 && ! can_compare_p (GE, mode, ccp_jump))
9693 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9694 else
9695 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9696 break;
9698 case UNORDERED_EXPR:
9699 case ORDERED_EXPR:
9701 enum rtx_code cmp, rcmp;
9702 int do_rev;
9704 if (code == UNORDERED_EXPR)
9705 cmp = UNORDERED, rcmp = ORDERED;
9706 else
9707 cmp = ORDERED, rcmp = UNORDERED;
9708 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9710 do_rev = 0;
9711 if (! can_compare_p (cmp, mode, ccp_jump)
9712 && (can_compare_p (rcmp, mode, ccp_jump)
9713 /* If the target doesn't provide either UNORDERED or ORDERED
9714 comparisons, canonicalize on UNORDERED for the library. */
9715 || rcmp == UNORDERED))
9716 do_rev = 1;
9718 if (! do_rev)
9719 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9720 else
9721 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9723 break;
9726 enum rtx_code rcode1;
9727 enum tree_code tcode2;
9729 case UNLT_EXPR:
9730 rcode1 = UNLT;
9731 tcode2 = LT_EXPR;
9732 goto unordered_bcc;
9733 case UNLE_EXPR:
9734 rcode1 = UNLE;
9735 tcode2 = LE_EXPR;
9736 goto unordered_bcc;
9737 case UNGT_EXPR:
9738 rcode1 = UNGT;
9739 tcode2 = GT_EXPR;
9740 goto unordered_bcc;
9741 case UNGE_EXPR:
9742 rcode1 = UNGE;
9743 tcode2 = GE_EXPR;
9744 goto unordered_bcc;
9745 case UNEQ_EXPR:
9746 rcode1 = UNEQ;
9747 tcode2 = EQ_EXPR;
9748 goto unordered_bcc;
9750 unordered_bcc:
9751 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9752 if (can_compare_p (rcode1, mode, ccp_jump))
9753 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9754 if_true_label);
9755 else
9757 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9758 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9759 tree cmp0, cmp1;
9761 /* If the target doesn't support combined unordered
9762 compares, decompose into UNORDERED + comparison. */
9763 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9764 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9765 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9766 do_jump (exp, if_false_label, if_true_label);
9769 break;
9771 default:
9772 normal:
9773 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9774 #if 0
9775 /* This is not needed any more and causes poor code since it causes
9776 comparisons and tests from non-SI objects to have different code
9777 sequences. */
9778 /* Copy to register to avoid generating bad insns by cse
9779 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9780 if (!cse_not_expected && GET_CODE (temp) == MEM)
9781 temp = copy_to_reg (temp);
9782 #endif
9783 do_pending_stack_adjust ();
9784 /* Do any postincrements in the expression that was tested. */
9785 emit_queue ();
9787 if (GET_CODE (temp) == CONST_INT
9788 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9789 || GET_CODE (temp) == LABEL_REF)
9791 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9792 if (target)
9793 emit_jump (target);
9795 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9796 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9797 /* Note swapping the labels gives us not-equal. */
9798 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9799 else if (GET_MODE (temp) != VOIDmode)
9800 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9801 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9802 GET_MODE (temp), NULL_RTX, 0,
9803 if_false_label, if_true_label);
9804 else
9805 abort ();
9808 if (drop_through_label)
9810 /* If do_jump produces code that might be jumped around,
9811 do any stack adjusts from that code, before the place
9812 where control merges in. */
9813 do_pending_stack_adjust ();
9814 emit_label (drop_through_label);
9818 /* Given a comparison expression EXP for values too wide to be compared
9819 with one insn, test the comparison and jump to the appropriate label.
9820 The code of EXP is ignored; we always test GT if SWAP is 0,
9821 and LT if SWAP is 1. */
9823 static void
9824 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9825 tree exp;
9826 int swap;
9827 rtx if_false_label, if_true_label;
9829 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9830 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9831 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9832 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9834 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9837 /* Compare OP0 with OP1, word at a time, in mode MODE.
9838 UNSIGNEDP says to do unsigned comparison.
9839 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9841 void
9842 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9843 enum machine_mode mode;
9844 int unsignedp;
9845 rtx op0, op1;
9846 rtx if_false_label, if_true_label;
9848 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9849 rtx drop_through_label = 0;
9850 int i;
9852 if (! if_true_label || ! if_false_label)
9853 drop_through_label = gen_label_rtx ();
9854 if (! if_true_label)
9855 if_true_label = drop_through_label;
9856 if (! if_false_label)
9857 if_false_label = drop_through_label;
9859 /* Compare a word at a time, high order first. */
9860 for (i = 0; i < nwords; i++)
9862 rtx op0_word, op1_word;
9864 if (WORDS_BIG_ENDIAN)
9866 op0_word = operand_subword_force (op0, i, mode);
9867 op1_word = operand_subword_force (op1, i, mode);
9869 else
9871 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9872 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9875 /* All but high-order word must be compared as unsigned. */
9876 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9877 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9878 NULL_RTX, if_true_label);
9880 /* Consider lower words only if these are equal. */
9881 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9882 NULL_RTX, 0, NULL_RTX, if_false_label);
9885 if (if_false_label)
9886 emit_jump (if_false_label);
9887 if (drop_through_label)
9888 emit_label (drop_through_label);
9891 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9892 with one insn, test the comparison and jump to the appropriate label. */
9894 static void
9895 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9896 tree exp;
9897 rtx if_false_label, if_true_label;
9899 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9900 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9901 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9902 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9903 int i;
9904 rtx drop_through_label = 0;
9906 if (! if_false_label)
9907 drop_through_label = if_false_label = gen_label_rtx ();
9909 for (i = 0; i < nwords; i++)
9910 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9911 operand_subword_force (op1, i, mode),
9912 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9913 word_mode, NULL_RTX, 0, if_false_label,
9914 NULL_RTX);
9916 if (if_true_label)
9917 emit_jump (if_true_label);
9918 if (drop_through_label)
9919 emit_label (drop_through_label);
9922 /* Jump according to whether OP0 is 0.
9923 We assume that OP0 has an integer mode that is too wide
9924 for the available compare insns. */
9926 void
9927 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9928 rtx op0;
9929 rtx if_false_label, if_true_label;
9931 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9932 rtx part;
9933 int i;
9934 rtx drop_through_label = 0;
9936 /* The fastest way of doing this comparison on almost any machine is to
9937 "or" all the words and compare the result. If all have to be loaded
9938 from memory and this is a very wide item, it's possible this may
9939 be slower, but that's highly unlikely. */
9941 part = gen_reg_rtx (word_mode);
9942 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9943 for (i = 1; i < nwords && part != 0; i++)
9944 part = expand_binop (word_mode, ior_optab, part,
9945 operand_subword_force (op0, i, GET_MODE (op0)),
9946 part, 1, OPTAB_WIDEN);
9948 if (part != 0)
9950 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9951 NULL_RTX, 0, if_false_label, if_true_label);
9953 return;
9956 /* If we couldn't do the "or" simply, do this with a series of compares. */
9957 if (! if_false_label)
9958 drop_through_label = if_false_label = gen_label_rtx ();
9960 for (i = 0; i < nwords; i++)
9961 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9962 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9963 if_false_label, NULL_RTX);
9965 if (if_true_label)
9966 emit_jump (if_true_label);
9968 if (drop_through_label)
9969 emit_label (drop_through_label);
9972 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9973 (including code to compute the values to be compared)
9974 and set (CC0) according to the result.
9975 The decision as to signed or unsigned comparison must be made by the caller.
9977 We force a stack adjustment unless there are currently
9978 things pushed on the stack that aren't yet used.
9980 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9981 compared.
9983 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9984 size of MODE should be used. */
9987 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9988 register rtx op0, op1;
9989 enum rtx_code code;
9990 int unsignedp;
9991 enum machine_mode mode;
9992 rtx size;
9993 unsigned int align;
9995 rtx tem;
9997 /* If one operand is constant, make it the second one. Only do this
9998 if the other operand is not constant as well. */
10000 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10001 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10003 tem = op0;
10004 op0 = op1;
10005 op1 = tem;
10006 code = swap_condition (code);
10009 if (flag_force_mem)
10011 op0 = force_not_mem (op0);
10012 op1 = force_not_mem (op1);
10015 do_pending_stack_adjust ();
10017 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10018 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10019 return tem;
10021 #if 0
10022 /* There's no need to do this now that combine.c can eliminate lots of
10023 sign extensions. This can be less efficient in certain cases on other
10024 machines. */
10026 /* If this is a signed equality comparison, we can do it as an
10027 unsigned comparison since zero-extension is cheaper than sign
10028 extension and comparisons with zero are done as unsigned. This is
10029 the case even on machines that can do fast sign extension, since
10030 zero-extension is easier to combine with other operations than
10031 sign-extension is. If we are comparing against a constant, we must
10032 convert it to what it would look like unsigned. */
10033 if ((code == EQ || code == NE) && ! unsignedp
10034 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10036 if (GET_CODE (op1) == CONST_INT
10037 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10038 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10039 unsignedp = 1;
10041 #endif
10043 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10045 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10048 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10049 The decision as to signed or unsigned comparison must be made by the caller.
10051 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10052 compared.
10054 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10055 size of MODE should be used. */
10057 void
10058 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10059 if_false_label, if_true_label)
10060 register rtx op0, op1;
10061 enum rtx_code code;
10062 int unsignedp;
10063 enum machine_mode mode;
10064 rtx size;
10065 unsigned int align;
10066 rtx if_false_label, if_true_label;
10068 rtx tem;
10069 int dummy_true_label = 0;
10071 /* Reverse the comparison if that is safe and we want to jump if it is
10072 false. */
10073 if (! if_true_label && ! FLOAT_MODE_P (mode))
10075 if_true_label = if_false_label;
10076 if_false_label = 0;
10077 code = reverse_condition (code);
10080 /* If one operand is constant, make it the second one. Only do this
10081 if the other operand is not constant as well. */
10083 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10084 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10086 tem = op0;
10087 op0 = op1;
10088 op1 = tem;
10089 code = swap_condition (code);
10092 if (flag_force_mem)
10094 op0 = force_not_mem (op0);
10095 op1 = force_not_mem (op1);
10098 do_pending_stack_adjust ();
10100 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10101 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10103 if (tem == const_true_rtx)
10105 if (if_true_label)
10106 emit_jump (if_true_label);
10108 else
10110 if (if_false_label)
10111 emit_jump (if_false_label);
10113 return;
10116 #if 0
10117 /* There's no need to do this now that combine.c can eliminate lots of
10118 sign extensions. This can be less efficient in certain cases on other
10119 machines. */
10121 /* If this is a signed equality comparison, we can do it as an
10122 unsigned comparison since zero-extension is cheaper than sign
10123 extension and comparisons with zero are done as unsigned. This is
10124 the case even on machines that can do fast sign extension, since
10125 zero-extension is easier to combine with other operations than
10126 sign-extension is. If we are comparing against a constant, we must
10127 convert it to what it would look like unsigned. */
10128 if ((code == EQ || code == NE) && ! unsignedp
10129 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10131 if (GET_CODE (op1) == CONST_INT
10132 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10133 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10134 unsignedp = 1;
10136 #endif
10138 if (! if_true_label)
10140 dummy_true_label = 1;
10141 if_true_label = gen_label_rtx ();
10144 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10145 if_true_label);
10147 if (if_false_label)
10148 emit_jump (if_false_label);
10149 if (dummy_true_label)
10150 emit_label (if_true_label);
10153 /* Generate code for a comparison expression EXP (including code to compute
10154 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10155 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10156 generated code will drop through.
10157 SIGNED_CODE should be the rtx operation for this comparison for
10158 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10160 We force a stack adjustment unless there are currently
10161 things pushed on the stack that aren't yet used. */
10163 static void
10164 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10165 if_true_label)
10166 register tree exp;
10167 enum rtx_code signed_code, unsigned_code;
10168 rtx if_false_label, if_true_label;
10170 unsigned int align0, align1;
10171 register rtx op0, op1;
10172 register tree type;
10173 register enum machine_mode mode;
10174 int unsignedp;
10175 enum rtx_code code;
10177 /* Don't crash if the comparison was erroneous. */
10178 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10179 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10180 return;
10182 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10183 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10184 return;
10186 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10187 mode = TYPE_MODE (type);
10188 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10189 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10190 || (GET_MODE_BITSIZE (mode)
10191 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10192 1)))))))
10194 /* op0 might have been replaced by promoted constant, in which
10195 case the type of second argument should be used. */
10196 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10197 mode = TYPE_MODE (type);
10199 unsignedp = TREE_UNSIGNED (type);
10200 code = unsignedp ? unsigned_code : signed_code;
10202 #ifdef HAVE_canonicalize_funcptr_for_compare
10203 /* If function pointers need to be "canonicalized" before they can
10204 be reliably compared, then canonicalize them. */
10205 if (HAVE_canonicalize_funcptr_for_compare
10206 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10207 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10208 == FUNCTION_TYPE))
10210 rtx new_op0 = gen_reg_rtx (mode);
10212 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10213 op0 = new_op0;
10216 if (HAVE_canonicalize_funcptr_for_compare
10217 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10218 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10219 == FUNCTION_TYPE))
10221 rtx new_op1 = gen_reg_rtx (mode);
10223 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10224 op1 = new_op1;
10226 #endif
10228 /* Do any postincrements in the expression that was tested. */
10229 emit_queue ();
10231 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10232 ((mode == BLKmode)
10233 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10234 MIN (align0, align1),
10235 if_false_label, if_true_label);
10238 /* Generate code to calculate EXP using a store-flag instruction
10239 and return an rtx for the result. EXP is either a comparison
10240 or a TRUTH_NOT_EXPR whose operand is a comparison.
10242 If TARGET is nonzero, store the result there if convenient.
10244 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10245 cheap.
10247 Return zero if there is no suitable set-flag instruction
10248 available on this machine.
10250 Once expand_expr has been called on the arguments of the comparison,
10251 we are committed to doing the store flag, since it is not safe to
10252 re-evaluate the expression. We emit the store-flag insn by calling
10253 emit_store_flag, but only expand the arguments if we have a reason
10254 to believe that emit_store_flag will be successful. If we think that
10255 it will, but it isn't, we have to simulate the store-flag with a
10256 set/jump/set sequence. */
10258 static rtx
10259 do_store_flag (exp, target, mode, only_cheap)
10260 tree exp;
10261 rtx target;
10262 enum machine_mode mode;
10263 int only_cheap;
10265 enum rtx_code code;
10266 tree arg0, arg1, type;
10267 tree tem;
10268 enum machine_mode operand_mode;
10269 int invert = 0;
10270 int unsignedp;
10271 rtx op0, op1;
10272 enum insn_code icode;
10273 rtx subtarget = target;
10274 rtx result, label;
10276 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10277 result at the end. We can't simply invert the test since it would
10278 have already been inverted if it were valid. This case occurs for
10279 some floating-point comparisons. */
10281 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10282 invert = 1, exp = TREE_OPERAND (exp, 0);
10284 arg0 = TREE_OPERAND (exp, 0);
10285 arg1 = TREE_OPERAND (exp, 1);
10287 /* Don't crash if the comparison was erroneous. */
10288 if (arg0 == error_mark_node || arg1 == error_mark_node)
10289 return const0_rtx;
10291 type = TREE_TYPE (arg0);
10292 operand_mode = TYPE_MODE (type);
10293 unsignedp = TREE_UNSIGNED (type);
10295 /* We won't bother with BLKmode store-flag operations because it would mean
10296 passing a lot of information to emit_store_flag. */
10297 if (operand_mode == BLKmode)
10298 return 0;
10300 /* We won't bother with store-flag operations involving function pointers
10301 when function pointers must be canonicalized before comparisons. */
10302 #ifdef HAVE_canonicalize_funcptr_for_compare
10303 if (HAVE_canonicalize_funcptr_for_compare
10304 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10305 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10306 == FUNCTION_TYPE))
10307 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10308 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10309 == FUNCTION_TYPE))))
10310 return 0;
10311 #endif
10313 STRIP_NOPS (arg0);
10314 STRIP_NOPS (arg1);
10316 /* Get the rtx comparison code to use. We know that EXP is a comparison
10317 operation of some type. Some comparisons against 1 and -1 can be
10318 converted to comparisons with zero. Do so here so that the tests
10319 below will be aware that we have a comparison with zero. These
10320 tests will not catch constants in the first operand, but constants
10321 are rarely passed as the first operand. */
10323 switch (TREE_CODE (exp))
10325 case EQ_EXPR:
10326 code = EQ;
10327 break;
10328 case NE_EXPR:
10329 code = NE;
10330 break;
10331 case LT_EXPR:
10332 if (integer_onep (arg1))
10333 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10334 else
10335 code = unsignedp ? LTU : LT;
10336 break;
10337 case LE_EXPR:
10338 if (! unsignedp && integer_all_onesp (arg1))
10339 arg1 = integer_zero_node, code = LT;
10340 else
10341 code = unsignedp ? LEU : LE;
10342 break;
10343 case GT_EXPR:
10344 if (! unsignedp && integer_all_onesp (arg1))
10345 arg1 = integer_zero_node, code = GE;
10346 else
10347 code = unsignedp ? GTU : GT;
10348 break;
10349 case GE_EXPR:
10350 if (integer_onep (arg1))
10351 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10352 else
10353 code = unsignedp ? GEU : GE;
10354 break;
10356 case UNORDERED_EXPR:
10357 code = UNORDERED;
10358 break;
10359 case ORDERED_EXPR:
10360 code = ORDERED;
10361 break;
10362 case UNLT_EXPR:
10363 code = UNLT;
10364 break;
10365 case UNLE_EXPR:
10366 code = UNLE;
10367 break;
10368 case UNGT_EXPR:
10369 code = UNGT;
10370 break;
10371 case UNGE_EXPR:
10372 code = UNGE;
10373 break;
10374 case UNEQ_EXPR:
10375 code = UNEQ;
10376 break;
10378 default:
10379 abort ();
10382 /* Put a constant second. */
10383 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10385 tem = arg0; arg0 = arg1; arg1 = tem;
10386 code = swap_condition (code);
10389 /* If this is an equality or inequality test of a single bit, we can
10390 do this by shifting the bit being tested to the low-order bit and
10391 masking the result with the constant 1. If the condition was EQ,
10392 we xor it with 1. This does not require an scc insn and is faster
10393 than an scc insn even if we have it. */
10395 if ((code == NE || code == EQ)
10396 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10397 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10399 tree inner = TREE_OPERAND (arg0, 0);
10400 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10401 int ops_unsignedp;
10403 /* If INNER is a right shift of a constant and it plus BITNUM does
10404 not overflow, adjust BITNUM and INNER. */
10406 if (TREE_CODE (inner) == RSHIFT_EXPR
10407 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10408 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10409 && bitnum < TYPE_PRECISION (type)
10410 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10411 bitnum - TYPE_PRECISION (type)))
10413 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10414 inner = TREE_OPERAND (inner, 0);
10417 /* If we are going to be able to omit the AND below, we must do our
10418 operations as unsigned. If we must use the AND, we have a choice.
10419 Normally unsigned is faster, but for some machines signed is. */
10420 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10421 #ifdef LOAD_EXTEND_OP
10422 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10423 #else
10425 #endif
10428 if (! get_subtarget (subtarget)
10429 || GET_MODE (subtarget) != operand_mode
10430 || ! safe_from_p (subtarget, inner, 1))
10431 subtarget = 0;
10433 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10435 if (bitnum != 0)
10436 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10437 size_int (bitnum), subtarget, ops_unsignedp);
10439 if (GET_MODE (op0) != mode)
10440 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10442 if ((code == EQ && ! invert) || (code == NE && invert))
10443 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10444 ops_unsignedp, OPTAB_LIB_WIDEN);
10446 /* Put the AND last so it can combine with more things. */
10447 if (bitnum != TYPE_PRECISION (type) - 1)
10448 op0 = expand_and (op0, const1_rtx, subtarget);
10450 return op0;
10453 /* Now see if we are likely to be able to do this. Return if not. */
10454 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10455 return 0;
10457 icode = setcc_gen_code[(int) code];
10458 if (icode == CODE_FOR_nothing
10459 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10461 /* We can only do this if it is one of the special cases that
10462 can be handled without an scc insn. */
10463 if ((code == LT && integer_zerop (arg1))
10464 || (! only_cheap && code == GE && integer_zerop (arg1)))
10466 else if (BRANCH_COST >= 0
10467 && ! only_cheap && (code == NE || code == EQ)
10468 && TREE_CODE (type) != REAL_TYPE
10469 && ((abs_optab->handlers[(int) operand_mode].insn_code
10470 != CODE_FOR_nothing)
10471 || (ffs_optab->handlers[(int) operand_mode].insn_code
10472 != CODE_FOR_nothing)))
10474 else
10475 return 0;
10478 if (! get_subtarget (target)
10479 || GET_MODE (subtarget) != operand_mode
10480 || ! safe_from_p (subtarget, arg1, 1))
10481 subtarget = 0;
10483 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10484 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10486 if (target == 0)
10487 target = gen_reg_rtx (mode);
10489 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10490 because, if the emit_store_flag does anything it will succeed and
10491 OP0 and OP1 will not be used subsequently. */
10493 result = emit_store_flag (target, code,
10494 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10495 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10496 operand_mode, unsignedp, 1);
10498 if (result)
10500 if (invert)
10501 result = expand_binop (mode, xor_optab, result, const1_rtx,
10502 result, 0, OPTAB_LIB_WIDEN);
10503 return result;
10506 /* If this failed, we have to do this with set/compare/jump/set code. */
10507 if (GET_CODE (target) != REG
10508 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10509 target = gen_reg_rtx (GET_MODE (target));
10511 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10512 result = compare_from_rtx (op0, op1, code, unsignedp,
10513 operand_mode, NULL_RTX, 0);
10514 if (GET_CODE (result) == CONST_INT)
10515 return (((result == const0_rtx && ! invert)
10516 || (result != const0_rtx && invert))
10517 ? const0_rtx : const1_rtx);
10519 label = gen_label_rtx ();
10520 if (bcc_gen_fctn[(int) code] == 0)
10521 abort ();
10523 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10524 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10525 emit_label (label);
10527 return target;
10530 /* Generate a tablejump instruction (used for switch statements). */
10532 #ifdef HAVE_tablejump
10534 /* INDEX is the value being switched on, with the lowest value
10535 in the table already subtracted.
10536 MODE is its expected mode (needed if INDEX is constant).
10537 RANGE is the length of the jump table.
10538 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10540 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10541 index value is out of range. */
10543 void
10544 do_tablejump (index, mode, range, table_label, default_label)
10545 rtx index, range, table_label, default_label;
10546 enum machine_mode mode;
10548 register rtx temp, vector;
10550 /* Do an unsigned comparison (in the proper mode) between the index
10551 expression and the value which represents the length of the range.
10552 Since we just finished subtracting the lower bound of the range
10553 from the index expression, this comparison allows us to simultaneously
10554 check that the original index expression value is both greater than
10555 or equal to the minimum value of the range and less than or equal to
10556 the maximum value of the range. */
10558 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10559 0, default_label);
10561 /* If index is in range, it must fit in Pmode.
10562 Convert to Pmode so we can index with it. */
10563 if (mode != Pmode)
10564 index = convert_to_mode (Pmode, index, 1);
10566 /* Don't let a MEM slip thru, because then INDEX that comes
10567 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10568 and break_out_memory_refs will go to work on it and mess it up. */
10569 #ifdef PIC_CASE_VECTOR_ADDRESS
10570 if (flag_pic && GET_CODE (index) != REG)
10571 index = copy_to_mode_reg (Pmode, index);
10572 #endif
10574 /* If flag_force_addr were to affect this address
10575 it could interfere with the tricky assumptions made
10576 about addresses that contain label-refs,
10577 which may be valid only very near the tablejump itself. */
10578 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10579 GET_MODE_SIZE, because this indicates how large insns are. The other
10580 uses should all be Pmode, because they are addresses. This code
10581 could fail if addresses and insns are not the same size. */
10582 index = gen_rtx_PLUS (Pmode,
10583 gen_rtx_MULT (Pmode, index,
10584 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10585 gen_rtx_LABEL_REF (Pmode, table_label));
10586 #ifdef PIC_CASE_VECTOR_ADDRESS
10587 if (flag_pic)
10588 index = PIC_CASE_VECTOR_ADDRESS (index);
10589 else
10590 #endif
10591 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10592 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10593 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10594 RTX_UNCHANGING_P (vector) = 1;
10595 convert_move (temp, vector, 0);
10597 emit_jump_insn (gen_tablejump (temp, table_label));
10599 /* If we are generating PIC code or if the table is PC-relative, the
10600 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10601 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10602 emit_barrier ();
10605 #endif /* HAVE_tablejump */