gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should be processed from last to first if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90 codes that safe_from_p needs to know about. Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
105 /* Don't check memory usage, since the code being emitted is itself checking
106 memory usage. Used when current_function_check_memory_usage is true, to avoid
107 infinite recursion. */
108 static int in_check_memory_usage;
110 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
111 static tree placeholder_list = 0;
113 /* This structure is used by move_by_pieces to describe the move to
114 be performed. */
115 struct move_by_pieces
117 rtx to;
118 rtx to_addr;
119 int autinc_to;
120 int explicit_inc_to;
121 rtx from;
122 rtx from_addr;
123 int autinc_from;
124 int explicit_inc_from;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 int reverse;
130 /* This structure is used by store_by_pieces to describe the clear to
131 be performed. */
133 struct store_by_pieces
135 rtx to;
136 rtx to_addr;
137 int autinc_to;
138 int explicit_inc_to;
139 unsigned HOST_WIDE_INT len;
140 HOST_WIDE_INT offset;
141 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
142 PTR constfundata;
143 int reverse;
146 extern struct obstack permanent_obstack;
148 static rtx get_push_address PARAMS ((int));
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
152 PARAMS ((unsigned HOST_WIDE_INT,
153 unsigned int));
154 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
155 struct move_by_pieces *));
156 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
157 enum machine_mode));
158 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 unsigned int));
160 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
161 unsigned int));
162 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
163 enum machine_mode,
164 struct store_by_pieces *));
165 static rtx get_subtarget PARAMS ((rtx));
166 static int is_zeros_p PARAMS ((tree));
167 static int mostly_zeros_p PARAMS ((tree));
168 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
169 HOST_WIDE_INT, enum machine_mode,
170 tree, tree, unsigned int, int,
171 int));
172 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
173 HOST_WIDE_INT));
174 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
175 HOST_WIDE_INT, enum machine_mode,
176 tree, enum machine_mode, int,
177 unsigned int, HOST_WIDE_INT, int));
178 static enum memory_use_mode
179 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
180 static tree save_noncopied_parts PARAMS ((tree, tree));
181 static tree init_noncopied_parts PARAMS ((tree, tree));
182 static int fixed_type_p PARAMS ((tree));
183 static rtx var_rtx PARAMS ((tree));
184 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
185 static rtx expand_increment PARAMS ((tree, int, int));
186 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
187 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
188 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
189 rtx, rtx));
190 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
192 /* Record for each mode whether we can move a register directly to or
193 from an object of that mode in memory. If we can't, we won't try
194 to use that mode directly when accessing a field of that mode. */
196 static char direct_load[NUM_MACHINE_MODES];
197 static char direct_store[NUM_MACHINE_MODES];
199 /* If a memory-to-memory move would take MOVE_RATIO or more simple
200 move-instruction sequences, we will do a movstr or libcall instead. */
202 #ifndef MOVE_RATIO
203 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
204 #define MOVE_RATIO 2
205 #else
206 /* If we are optimizing for space (-Os), cut down the default move ratio. */
207 #define MOVE_RATIO (optimize_size ? 3 : 15)
208 #endif
209 #endif
211 /* This macro is used to determine whether move_by_pieces should be called
212 to perform a structure copy. */
213 #ifndef MOVE_BY_PIECES_P
214 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
215 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
216 #endif
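
/* A minimal sketch of how these macros are meant to be consulted, assuming
   X, Y, SIZE and ALIGN are operands already set up by the caller;
   emit_block_move, later in this file, follows essentially this pattern.  */
#if 0
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    ;  /* ...try a movstr pattern, then fall back to memcpy/bcopy.  */
#endif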
218 /* This array records the insn_code of insns to perform block moves. */
219 enum insn_code movstr_optab[NUM_MACHINE_MODES];
221 /* This array records the insn_code of insns to perform block clears. */
222 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
224 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
226 #ifndef SLOW_UNALIGNED_ACCESS
227 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
228 #endif
230 /* This is run once per compilation to set up which modes can be used
231 directly in memory and to initialize the block move optab. */
233 void
234 init_expr_once ()
236 rtx insn, pat;
237 enum machine_mode mode;
238 int num_clobbers;
239 rtx mem, mem1;
241 start_sequence ();
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
249 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
250 pat = PATTERN (insn);
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
255 int regno;
256 rtx reg;
258 direct_load[(int) mode] = direct_store[(int) mode] = 0;
259 PUT_MODE (mem, mode);
260 PUT_MODE (mem1, mode);
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
268 regno++)
270 if (! HARD_REGNO_MODE_OK (regno, mode))
271 continue;
273 reg = gen_rtx_REG (mode, regno);
275 SET_SRC (pat) = mem;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
290 SET_SRC (pat) = reg;
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
297 end_sequence ();
300 /* This is run at the start of compiling a function. */
302 void
303 init_expr ()
305 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
307 pending_chain = 0;
308 pending_stack_adjust = 0;
309 stack_pointer_delta = 0;
310 inhibit_defer_pop = 0;
311 saveregs_value = 0;
312 apply_args_value = 0;
313 forced_labels = 0;
316 void
317 mark_expr_status (p)
318 struct expr_status *p;
320 if (p == NULL)
321 return;
323 ggc_mark_rtx (p->x_saveregs_value);
324 ggc_mark_rtx (p->x_apply_args_value);
325 ggc_mark_rtx (p->x_forced_labels);
328 void
329 free_expr_status (f)
330 struct function *f;
332 free (f->expr);
333 f->expr = NULL;
336 /* Small sanity check that the queue is empty at the end of a function. */
338 void
339 finish_expr_for_function ()
341 if (pending_chain)
342 abort ();
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
380 protect_from_queue (x, modify)
381 register rtx x;
382 int modify;
384 register RTX_CODE code = GET_CODE (x);
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
392 if (code != QUEUED)
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
405 MEM_COPY_ATTRIBUTES (new, x);
407 if (QUEUED_INSN (y))
409 register rtx temp = gen_reg_rtx (GET_MODE (new));
410 emit_insn_before (gen_move_insn (temp, new),
411 QUEUED_INSN (y));
412 return temp;
414 return new;
416 /* Otherwise, recursively protect the subexpressions of all
417 the kinds of rtx's that can contain a QUEUED. */
418 if (code == MEM)
420 rtx tem = protect_from_queue (XEXP (x, 0), 0);
421 if (tem != XEXP (x, 0))
423 x = copy_rtx (x);
424 XEXP (x, 0) = tem;
427 else if (code == PLUS || code == MULT)
429 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
430 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
431 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
433 x = copy_rtx (x);
434 XEXP (x, 0) = new0;
435 XEXP (x, 1) = new1;
438 return x;
440 /* If the increment has not happened, use the variable itself. */
441 if (QUEUED_INSN (x) == 0)
442 return QUEUED_VAR (x);
443 /* If the increment has happened and a pre-increment copy exists,
444 use that copy. */
445 if (QUEUED_COPY (x) != 0)
446 return QUEUED_COPY (x);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
451 QUEUED_INSN (x));
452 return QUEUED_COPY (x);
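
/* A minimal usage sketch, assuming TO and FROM are operand rtx's that may
   contain QUEUED expressions: pass 1 for an operand that will be stored
   into and 0 for one that is only read, as convert_move does below.  */
#if 0
  to = protect_from_queue (to, 1);      /* TO will be modified.  */
  from = protect_from_queue (from, 0);  /* FROM is only read.  */
#endif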
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
461 queued_subexp_p (x)
462 rtx x;
464 register enum rtx_code code = GET_CODE (x);
465 switch (code)
467 case QUEUED:
468 return 1;
469 case MEM:
470 return queued_subexp_p (XEXP (x, 0));
471 case MULT:
472 case PLUS:
473 case MINUS:
474 return (queued_subexp_p (XEXP (x, 0))
475 || queued_subexp_p (XEXP (x, 1)));
476 default:
477 return 0;
481 /* Perform all the pending incrementations. */
483 void
484 emit_queue ()
486 register rtx p;
487 while ((p = pending_chain))
489 rtx body = QUEUED_BODY (p);
491 if (GET_CODE (body) == SEQUENCE)
493 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
494 emit_insn (QUEUED_BODY (p));
496 else
497 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
498 pending_chain = QUEUED_NEXT (p);
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
507 void
508 convert_move (to, from, unsignedp)
509 register rtx to, from;
510 int unsignedp;
512 enum machine_mode to_mode = GET_MODE (to);
513 enum machine_mode from_mode = GET_MODE (from);
514 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
515 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
516 enum insn_code code;
517 rtx libcall;
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
522 to = protect_from_queue (to, 1);
523 from = protect_from_queue (from, 0);
525 if (to_real != from_real)
526 abort ();
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
530 TO here. */
532 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
534 >= GET_MODE_SIZE (to_mode))
535 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
536 from = gen_lowpart (to_mode, from), from_mode = to_mode;
538 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
539 abort ();
541 if (to_mode == from_mode
542 || (from_mode == VOIDmode && CONSTANT_P (from)))
544 emit_move_insn (to, from);
545 return;
548 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
550 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
551 abort ();
553 if (VECTOR_MODE_P (to_mode))
554 from = gen_rtx_SUBREG (to_mode, from, 0);
555 else
556 to = gen_rtx_SUBREG (from_mode, to, 0);
558 emit_move_insn (to, from);
559 return;
562 if (to_real != from_real)
563 abort ();
565 if (to_real)
567 rtx value, insns;
569 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
571 /* Try converting directly if the insn is supported. */
572 if ((code = can_extend_p (to_mode, from_mode, 0))
573 != CODE_FOR_nothing)
575 emit_unop_insn (code, to, from, UNKNOWN);
576 return;
580 #ifdef HAVE_trunchfqf2
581 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
583 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
584 return;
586 #endif
587 #ifdef HAVE_trunctqfqf2
588 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
590 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
591 return;
593 #endif
594 #ifdef HAVE_truncsfqf2
595 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
597 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
598 return;
600 #endif
601 #ifdef HAVE_truncdfqf2
602 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
604 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
605 return;
607 #endif
608 #ifdef HAVE_truncxfqf2
609 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
611 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
612 return;
614 #endif
615 #ifdef HAVE_trunctfqf2
616 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
618 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
619 return;
621 #endif
623 #ifdef HAVE_trunctqfhf2
624 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
626 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
627 return;
629 #endif
630 #ifdef HAVE_truncsfhf2
631 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
633 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
634 return;
636 #endif
637 #ifdef HAVE_truncdfhf2
638 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
640 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
641 return;
643 #endif
644 #ifdef HAVE_truncxfhf2
645 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
647 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
648 return;
650 #endif
651 #ifdef HAVE_trunctfhf2
652 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
654 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
655 return;
657 #endif
659 #ifdef HAVE_truncsftqf2
660 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
662 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
663 return;
665 #endif
666 #ifdef HAVE_truncdftqf2
667 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
669 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
670 return;
672 #endif
673 #ifdef HAVE_truncxftqf2
674 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
676 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
677 return;
679 #endif
680 #ifdef HAVE_trunctftqf2
681 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
683 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
684 return;
686 #endif
688 #ifdef HAVE_truncdfsf2
689 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
691 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
692 return;
694 #endif
695 #ifdef HAVE_truncxfsf2
696 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
698 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
699 return;
701 #endif
702 #ifdef HAVE_trunctfsf2
703 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
705 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
706 return;
708 #endif
709 #ifdef HAVE_truncxfdf2
710 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
712 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
713 return;
715 #endif
716 #ifdef HAVE_trunctfdf2
717 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
719 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
720 return;
722 #endif
724 libcall = (rtx) 0;
725 switch (from_mode)
727 case SFmode:
728 switch (to_mode)
730 case DFmode:
731 libcall = extendsfdf2_libfunc;
732 break;
734 case XFmode:
735 libcall = extendsfxf2_libfunc;
736 break;
738 case TFmode:
739 libcall = extendsftf2_libfunc;
740 break;
742 default:
743 break;
745 break;
747 case DFmode:
748 switch (to_mode)
750 case SFmode:
751 libcall = truncdfsf2_libfunc;
752 break;
754 case XFmode:
755 libcall = extenddfxf2_libfunc;
756 break;
758 case TFmode:
759 libcall = extenddftf2_libfunc;
760 break;
762 default:
763 break;
765 break;
767 case XFmode:
768 switch (to_mode)
770 case SFmode:
771 libcall = truncxfsf2_libfunc;
772 break;
774 case DFmode:
775 libcall = truncxfdf2_libfunc;
776 break;
778 default:
779 break;
781 break;
783 case TFmode:
784 switch (to_mode)
786 case SFmode:
787 libcall = trunctfsf2_libfunc;
788 break;
790 case DFmode:
791 libcall = trunctfdf2_libfunc;
792 break;
794 default:
795 break;
797 break;
799 default:
800 break;
803 if (libcall == (rtx) 0)
804 /* This conversion is not implemented yet. */
805 abort ();
807 start_sequence ();
808 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
809 1, from, from_mode);
810 insns = get_insns ();
811 end_sequence ();
812 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
813 from));
814 return;
817 /* Now both modes are integers. */
819 /* Handle expanding beyond a word. */
820 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
821 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
823 rtx insns;
824 rtx lowpart;
825 rtx fill_value;
826 rtx lowfrom;
827 int i;
828 enum machine_mode lowpart_mode;
829 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
831 /* Try converting directly if the insn is supported. */
832 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
833 != CODE_FOR_nothing)
835 /* If FROM is a SUBREG, put it into a register. Do this
836 so that we always generate the same set of insns for
837 better cse'ing; if an intermediate assignment occurred,
838 we won't be doing the operation directly on the SUBREG. */
839 if (optimize > 0 && GET_CODE (from) == SUBREG)
840 from = force_reg (from_mode, from);
841 emit_unop_insn (code, to, from, equiv_code);
842 return;
844 /* Next, try converting via full word. */
845 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
846 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
847 != CODE_FOR_nothing))
849 if (GET_CODE (to) == REG)
850 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
851 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
852 emit_unop_insn (code, to,
853 gen_lowpart (word_mode, to), equiv_code);
854 return;
857 /* No special multiword conversion insn; do it by hand. */
858 start_sequence ();
860 /* Since we will turn this into a no conflict block, we must ensure
861 that the source does not overlap the target. */
863 if (reg_overlap_mentioned_p (to, from))
864 from = force_reg (from_mode, from);
866 /* Get a copy of FROM widened to a word, if necessary. */
867 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
868 lowpart_mode = word_mode;
869 else
870 lowpart_mode = from_mode;
872 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
874 lowpart = gen_lowpart (lowpart_mode, to);
875 emit_move_insn (lowpart, lowfrom);
877 /* Compute the value to put in each remaining word. */
878 if (unsignedp)
879 fill_value = const0_rtx;
880 else
882 #ifdef HAVE_slt
883 if (HAVE_slt
884 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
885 && STORE_FLAG_VALUE == -1)
887 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
888 lowpart_mode, 0, 0);
889 fill_value = gen_reg_rtx (word_mode);
890 emit_insn (gen_slt (fill_value));
892 else
893 #endif
895 fill_value
896 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
897 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
898 NULL_RTX, 0);
899 fill_value = convert_to_mode (word_mode, fill_value, 1);
903 /* Fill the remaining words. */
904 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
906 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
907 rtx subword = operand_subword (to, index, 1, to_mode);
909 if (subword == 0)
910 abort ();
912 if (fill_value != subword)
913 emit_move_insn (subword, fill_value);
916 insns = get_insns ();
917 end_sequence ();
919 emit_no_conflict_block (insns, to, from, NULL_RTX,
920 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
921 return;
924 /* Truncating multi-word to a word or less. */
925 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
926 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
928 if (!((GET_CODE (from) == MEM
929 && ! MEM_VOLATILE_P (from)
930 && direct_load[(int) to_mode]
931 && ! mode_dependent_address_p (XEXP (from, 0)))
932 || GET_CODE (from) == REG
933 || GET_CODE (from) == SUBREG))
934 from = force_reg (from_mode, from);
935 convert_move (to, gen_lowpart (word_mode, from), 0);
936 return;
939 /* Handle pointer conversion. */ /* SPEE 900220. */
940 if (to_mode == PQImode)
942 if (from_mode != QImode)
943 from = convert_to_mode (QImode, from, unsignedp);
945 #ifdef HAVE_truncqipqi2
946 if (HAVE_truncqipqi2)
948 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
949 return;
951 #endif /* HAVE_truncqipqi2 */
952 abort ();
955 if (from_mode == PQImode)
957 if (to_mode != QImode)
959 from = convert_to_mode (QImode, from, unsignedp);
960 from_mode = QImode;
962 else
964 #ifdef HAVE_extendpqiqi2
965 if (HAVE_extendpqiqi2)
967 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
968 return;
970 #endif /* HAVE_extendpqiqi2 */
971 abort ();
975 if (to_mode == PSImode)
977 if (from_mode != SImode)
978 from = convert_to_mode (SImode, from, unsignedp);
980 #ifdef HAVE_truncsipsi2
981 if (HAVE_truncsipsi2)
983 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
984 return;
986 #endif /* HAVE_truncsipsi2 */
987 abort ();
990 if (from_mode == PSImode)
992 if (to_mode != SImode)
994 from = convert_to_mode (SImode, from, unsignedp);
995 from_mode = SImode;
997 else
999 #ifdef HAVE_extendpsisi2
1000 if (! unsignedp && HAVE_extendpsisi2)
1002 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1003 return;
1005 #endif /* HAVE_extendpsisi2 */
1006 #ifdef HAVE_zero_extendpsisi2
1007 if (unsignedp && HAVE_zero_extendpsisi2)
1009 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1010 return;
1012 #endif /* HAVE_zero_extendpsisi2 */
1013 abort ();
1017 if (to_mode == PDImode)
1019 if (from_mode != DImode)
1020 from = convert_to_mode (DImode, from, unsignedp);
1022 #ifdef HAVE_truncdipdi2
1023 if (HAVE_truncdipdi2)
1025 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1026 return;
1028 #endif /* HAVE_truncdipdi2 */
1029 abort ();
1032 if (from_mode == PDImode)
1034 if (to_mode != DImode)
1036 from = convert_to_mode (DImode, from, unsignedp);
1037 from_mode = DImode;
1039 else
1041 #ifdef HAVE_extendpdidi2
1042 if (HAVE_extendpdidi2)
1044 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1045 return;
1047 #endif /* HAVE_extendpdidi2 */
1048 abort ();
1052 /* Now follow all the conversions between integers
1053 no more than a word long. */
1055 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1056 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1057 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1058 GET_MODE_BITSIZE (from_mode)))
1060 if (!((GET_CODE (from) == MEM
1061 && ! MEM_VOLATILE_P (from)
1062 && direct_load[(int) to_mode]
1063 && ! mode_dependent_address_p (XEXP (from, 0)))
1064 || GET_CODE (from) == REG
1065 || GET_CODE (from) == SUBREG))
1066 from = force_reg (from_mode, from);
1067 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1068 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1069 from = copy_to_reg (from);
1070 emit_move_insn (to, gen_lowpart (to_mode, from));
1071 return;
1074 /* Handle extension. */
1075 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1077 /* Convert directly if that works. */
1078 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1079 != CODE_FOR_nothing)
1081 emit_unop_insn (code, to, from, equiv_code);
1082 return;
1084 else
1086 enum machine_mode intermediate;
1087 rtx tmp;
1088 tree shift_amount;
1090 /* Search for a mode to convert via. */
1091 for (intermediate = from_mode; intermediate != VOIDmode;
1092 intermediate = GET_MODE_WIDER_MODE (intermediate))
1093 if (((can_extend_p (to_mode, intermediate, unsignedp)
1094 != CODE_FOR_nothing)
1095 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1096 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1097 GET_MODE_BITSIZE (intermediate))))
1098 && (can_extend_p (intermediate, from_mode, unsignedp)
1099 != CODE_FOR_nothing))
1101 convert_move (to, convert_to_mode (intermediate, from,
1102 unsignedp), unsignedp);
1103 return;
1106 /* No suitable intermediate mode.
1107 Generate what we need with shifts. */
1108 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1109 - GET_MODE_BITSIZE (from_mode), 0);
1110 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1111 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1112 to, unsignedp);
1113 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1114 to, unsignedp);
1115 if (tmp != to)
1116 emit_move_insn (to, tmp);
1117 return;
1121 /* Support special truncate insns for certain modes. */
1123 if (from_mode == DImode && to_mode == SImode)
1125 #ifdef HAVE_truncdisi2
1126 if (HAVE_truncdisi2)
1128 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1129 return;
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1136 if (from_mode == DImode && to_mode == HImode)
1138 #ifdef HAVE_truncdihi2
1139 if (HAVE_truncdihi2)
1141 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1142 return;
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1149 if (from_mode == DImode && to_mode == QImode)
1151 #ifdef HAVE_truncdiqi2
1152 if (HAVE_truncdiqi2)
1154 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1155 return;
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1162 if (from_mode == SImode && to_mode == HImode)
1164 #ifdef HAVE_truncsihi2
1165 if (HAVE_truncsihi2)
1167 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1168 return;
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1175 if (from_mode == SImode && to_mode == QImode)
1177 #ifdef HAVE_truncsiqi2
1178 if (HAVE_truncsiqi2)
1180 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1181 return;
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1188 if (from_mode == HImode && to_mode == QImode)
1190 #ifdef HAVE_trunchiqi2
1191 if (HAVE_trunchiqi2)
1193 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1194 return;
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1201 if (from_mode == TImode && to_mode == DImode)
1203 #ifdef HAVE_trunctidi2
1204 if (HAVE_trunctidi2)
1206 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1207 return;
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1214 if (from_mode == TImode && to_mode == SImode)
1216 #ifdef HAVE_trunctisi2
1217 if (HAVE_trunctisi2)
1219 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1220 return;
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1227 if (from_mode == TImode && to_mode == HImode)
1229 #ifdef HAVE_trunctihi2
1230 if (HAVE_trunctihi2)
1232 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1233 return;
1235 #endif
1236 convert_move (to, force_reg (from_mode, from), unsignedp);
1237 return;
1240 if (from_mode == TImode && to_mode == QImode)
1242 #ifdef HAVE_trunctiqi2
1243 if (HAVE_trunctiqi2)
1245 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1246 return;
1248 #endif
1249 convert_move (to, force_reg (from_mode, from), unsignedp);
1250 return;
1253 /* Handle truncation of volatile memrefs, and so on;
1254 the things that couldn't be truncated directly,
1255 and for which there was no special instruction. */
1256 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1258 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1259 emit_move_insn (to, temp);
1260 return;
1263 /* Mode combination is not recognized. */
1264 abort ();
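
/* A minimal sketch of a convert_move call, using hypothetical pseudos:
   widen an SImode value into a DImode register.  UNSIGNEDP of zero asks
   for sign-extension; nonzero would ask for zero-extension.  */
#if 0
  rtx narrow = gen_reg_rtx (SImode);
  rtx wide = gen_reg_rtx (DImode);
  convert_move (wide, narrow, 0);       /* Sign-extend NARROW into WIDE.  */
#endif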
1267 /* Return an rtx for a value that would result
1268 from converting X to mode MODE.
1269 Both X and MODE may be floating, or both integer.
1270 UNSIGNEDP is nonzero if X is an unsigned value.
1271 This can be done by referring to a part of X in place
1272 or by copying to a new temporary with conversion.
1274 This function *must not* call protect_from_queue
1275 except when putting X into an insn (in which case convert_move does it). */
1278 convert_to_mode (mode, x, unsignedp)
1279 enum machine_mode mode;
1280 rtx x;
1281 int unsignedp;
1283 return convert_modes (mode, VOIDmode, x, unsignedp);
1286 /* Return an rtx for a value that would result
1287 from converting X from mode OLDMODE to mode MODE.
1288 Both modes may be floating, or both integer.
1289 UNSIGNEDP is nonzero if X is an unsigned value.
1291 This can be done by referring to a part of X in place
1292 or by copying to a new temporary with conversion.
1294 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1296 This function *must not* call protect_from_queue
1297 except when putting X into an insn (in which case convert_move does it). */
1300 convert_modes (mode, oldmode, x, unsignedp)
1301 enum machine_mode mode, oldmode;
1302 rtx x;
1303 int unsignedp;
1305 register rtx temp;
1307 /* If FROM is a SUBREG that indicates that we have already done at least
1308 the required extension, strip it. */
1310 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1311 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1312 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1313 x = gen_lowpart (mode, x);
1315 if (GET_MODE (x) != VOIDmode)
1316 oldmode = GET_MODE (x);
1318 if (mode == oldmode)
1319 return x;
1321 /* There is one case that we must handle specially: If we are converting
1322 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1323 we are to interpret the constant as unsigned, gen_lowpart will do
1324 the wrong thing if the constant appears negative. What we want to do is
1325 make the high-order word of the constant zero, not all ones. */
1327 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1328 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1329 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1331 HOST_WIDE_INT val = INTVAL (x);
1333 if (oldmode != VOIDmode
1334 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1336 int width = GET_MODE_BITSIZE (oldmode);
1338 /* We need to zero extend VAL. */
1339 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1342 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1345 /* We can do this with a gen_lowpart if both desired and current modes
1346 are integer, and this is either a constant integer, a register, or a
1347 non-volatile MEM. Except for the constant case where MODE is no
1348 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1350 if ((GET_CODE (x) == CONST_INT
1351 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1352 || (GET_MODE_CLASS (mode) == MODE_INT
1353 && GET_MODE_CLASS (oldmode) == MODE_INT
1354 && (GET_CODE (x) == CONST_DOUBLE
1355 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1356 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1357 && direct_load[(int) mode])
1358 || (GET_CODE (x) == REG
1359 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1360 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1362 /* ?? If we don't know OLDMODE, we have to assume here that
1363 X does not need sign- or zero-extension. This may not be
1364 the case, but it's the best we can do. */
1365 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1366 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1368 HOST_WIDE_INT val = INTVAL (x);
1369 int width = GET_MODE_BITSIZE (oldmode);
1371 /* We must sign or zero-extend in this case. Start by
1372 zero-extending, then sign extend if we need to. */
1373 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1374 if (! unsignedp
1375 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1376 val |= (HOST_WIDE_INT) (-1) << width;
1378 return GEN_INT (val);
1381 return gen_lowpart (mode, x);
1384 temp = gen_reg_rtx (mode);
1385 convert_move (temp, x, unsignedp);
1386 return temp;
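
/* A minimal sketch contrasting the two entry points, with hypothetical
   operands: convert_to_mode suffices when X already carries its mode;
   convert_modes lets the caller name OLDMODE for a VOIDmode constant.  */
#if 0
  rtx wide = convert_to_mode (DImode, some_si_reg, 1);        /* zero-extend */
  rtx cst = convert_modes (HImode, SImode, GEN_INT (12), 0);
#endif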
1389 /* This macro determines the largest unit size that
1390 move_by_pieces can use. */
1392 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1393 move efficiently, as opposed to MOVE_MAX which is the maximum
1394 number of bytes we can move with a single instruction. */
1396 #ifndef MOVE_MAX_PIECES
1397 #define MOVE_MAX_PIECES MOVE_MAX
1398 #endif
1400 /* Generate several move instructions to copy LEN bytes
1401 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1402 The caller must pass FROM and TO
1403 through protect_from_queue before calling.
1404 ALIGN is maximum alignment we can assume. */
1406 void
1407 move_by_pieces (to, from, len, align)
1408 rtx to, from;
1409 unsigned HOST_WIDE_INT len;
1410 unsigned int align;
1412 struct move_by_pieces data;
1413 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1414 unsigned int max_size = MOVE_MAX_PIECES + 1;
1415 enum machine_mode mode = VOIDmode, tmode;
1416 enum insn_code icode;
1418 data.offset = 0;
1419 data.to_addr = to_addr;
1420 data.from_addr = from_addr;
1421 data.to = to;
1422 data.from = from;
1423 data.autinc_to
1424 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1425 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1426 data.autinc_from
1427 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1428 || GET_CODE (from_addr) == POST_INC
1429 || GET_CODE (from_addr) == POST_DEC);
1431 data.explicit_inc_from = 0;
1432 data.explicit_inc_to = 0;
1433 data.reverse
1434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1435 if (data.reverse) data.offset = len;
1436 data.len = len;
1438 /* If copying requires more than two move insns,
1439 copy addresses to registers (to make displacements shorter)
1440 and use post-increment if available. */
1441 if (!(data.autinc_from && data.autinc_to)
1442 && move_by_pieces_ninsns (len, align) > 2)
1444 /* Find the mode of the largest move... */
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
1448 mode = tmode;
1450 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1452 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1453 data.autinc_from = 1;
1454 data.explicit_inc_from = -1;
1456 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 data.autinc_from = 1;
1460 data.explicit_inc_from = 1;
1462 if (!data.autinc_from && CONSTANT_P (from_addr))
1463 data.from_addr = copy_addr_to_reg (from_addr);
1464 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1466 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1467 data.autinc_to = 1;
1468 data.explicit_inc_to = -1;
1470 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1472 data.to_addr = copy_addr_to_reg (to_addr);
1473 data.autinc_to = 1;
1474 data.explicit_inc_to = 1;
1476 if (!data.autinc_to && CONSTANT_P (to_addr))
1477 data.to_addr = copy_addr_to_reg (to_addr);
1480 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1481 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1482 align = MOVE_MAX * BITS_PER_UNIT;
1484 /* First move what we can in the largest integer mode, then go to
1485 successively smaller modes. */
1487 while (max_size > 1)
1489 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1490 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1491 if (GET_MODE_SIZE (tmode) < max_size)
1492 mode = tmode;
1494 if (mode == VOIDmode)
1495 break;
1497 icode = mov_optab->handlers[(int) mode].insn_code;
1498 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1499 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1501 max_size = GET_MODE_SIZE (mode);
1504 /* The code above should have handled everything. */
1505 if (data.len > 0)
1506 abort ();
1509 /* Return number of insns required to move L bytes by pieces.
1510 ALIGN (in bits) is the maximum alignment we can assume. */
1512 static unsigned HOST_WIDE_INT
1513 move_by_pieces_ninsns (l, align)
1514 unsigned HOST_WIDE_INT l;
1515 unsigned int align;
1517 unsigned HOST_WIDE_INT n_insns = 0;
1518 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1520 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1521 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1522 align = MOVE_MAX * BITS_PER_UNIT;
1524 while (max_size > 1)
1526 enum machine_mode mode = VOIDmode, tmode;
1527 enum insn_code icode;
1529 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1530 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1531 if (GET_MODE_SIZE (tmode) < max_size)
1532 mode = tmode;
1534 if (mode == VOIDmode)
1535 break;
1537 icode = mov_optab->handlers[(int) mode].insn_code;
1538 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1539 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1541 max_size = GET_MODE_SIZE (mode);
1544 if (l)
1545 abort ();
1546 return n_insns;
1549 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1550 with move instructions for mode MODE. GENFUN is the gen_... function
1551 to make a move insn for that mode. DATA has all the other info. */
1553 static void
1554 move_by_pieces_1 (genfun, mode, data)
1555 rtx (*genfun) PARAMS ((rtx, ...));
1556 enum machine_mode mode;
1557 struct move_by_pieces *data;
1559 unsigned int size = GET_MODE_SIZE (mode);
1560 rtx to1, from1;
1562 while (data->len >= size)
1564 if (data->reverse)
1565 data->offset -= size;
1567 if (data->autinc_to)
1569 to1 = gen_rtx_MEM (mode, data->to_addr);
1570 MEM_COPY_ATTRIBUTES (to1, data->to);
1572 else
1573 to1 = change_address (data->to, mode,
1574 plus_constant (data->to_addr, data->offset));
1576 if (data->autinc_from)
1578 from1 = gen_rtx_MEM (mode, data->from_addr);
1579 MEM_COPY_ATTRIBUTES (from1, data->from);
1581 else
1582 from1 = change_address (data->from, mode,
1583 plus_constant (data->from_addr, data->offset));
1585 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1586 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1587 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1588 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1590 emit_insn ((*genfun) (to1, from1));
1592 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1593 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1594 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1595 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1597 if (! data->reverse)
1598 data->offset += size;
1600 data->len -= size;
1604 /* Emit code to move a block Y to a block X.
1605 This may be done with string-move instructions,
1606 with multiple scalar move instructions, or with a library call.
1608 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1609 with mode BLKmode.
1610 SIZE is an rtx that says how long they are.
1611 ALIGN is the maximum alignment we can assume they have.
1613 Return the address of the new block, if memcpy is called and returns it,
1614 0 otherwise. */
1617 emit_block_move (x, y, size, align)
1618 rtx x, y;
1619 rtx size;
1620 unsigned int align;
1622 rtx retval = 0;
1623 #ifdef TARGET_MEM_FUNCTIONS
1624 static tree fn;
1625 tree call_expr, arg_list;
1626 #endif
1628 if (GET_MODE (x) != BLKmode)
1629 abort ();
1631 if (GET_MODE (y) != BLKmode)
1632 abort ();
1634 x = protect_from_queue (x, 1);
1635 y = protect_from_queue (y, 0);
1636 size = protect_from_queue (size, 0);
1638 if (GET_CODE (x) != MEM)
1639 abort ();
1640 if (GET_CODE (y) != MEM)
1641 abort ();
1642 if (size == 0)
1643 abort ();
1645 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1646 move_by_pieces (x, y, INTVAL (size), align);
1647 else
1649 /* Try the most limited insn first, because there's no point
1650 including more than one in the machine description unless
1651 the more limited one has some advantage. */
1653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1654 enum machine_mode mode;
1656 /* Since this is a move insn, we don't care about volatility. */
1657 volatile_ok = 1;
1659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1660 mode = GET_MODE_WIDER_MODE (mode))
1662 enum insn_code code = movstr_optab[(int) mode];
1663 insn_operand_predicate_fn pred;
1665 if (code != CODE_FOR_nothing
1666 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1667 here because if SIZE is less than the mode mask, as it is
1668 returned by the macro, it will definitely be less than the
1669 actual mode mask. */
1670 && ((GET_CODE (size) == CONST_INT
1671 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1672 <= (GET_MODE_MASK (mode) >> 1)))
1673 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1674 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1675 || (*pred) (x, BLKmode))
1676 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1677 || (*pred) (y, BLKmode))
1678 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1679 || (*pred) (opalign, VOIDmode)))
1681 rtx op2;
1682 rtx last = get_last_insn ();
1683 rtx pat;
1685 op2 = convert_to_mode (mode, size, 1);
1686 pred = insn_data[(int) code].operand[2].predicate;
1687 if (pred != 0 && ! (*pred) (op2, mode))
1688 op2 = copy_to_mode_reg (mode, op2);
1690 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1691 if (pat)
1693 emit_insn (pat);
1694 volatile_ok = 0;
1695 return 0;
1697 else
1698 delete_insns_since (last);
1702 volatile_ok = 0;
1704 /* X, Y, or SIZE may have been passed through protect_from_queue.
1706 It is unsafe to save the value generated by protect_from_queue
1707 and reuse it later. Consider what happens if emit_queue is
1708 called before the return value from protect_from_queue is used.
1710 Expansion of the CALL_EXPR below will call emit_queue before
1711 we are finished emitting RTL for argument setup. So if we are
1712 not careful we could get the wrong value for an argument.
1714 To avoid this problem we go ahead and emit code to copy X, Y &
1715 SIZE into new pseudos. We can then place those new pseudos
1716 into an RTL_EXPR and use them later, even after a call to
1717 emit_queue.
1719 Note this is not strictly needed for library calls since they
1720 do not call emit_queue before loading their arguments. However,
1721 we may need to have library calls call emit_queue in the future
1722 since failing to do so could cause problems for targets which
1723 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1724 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1725 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1727 #ifdef TARGET_MEM_FUNCTIONS
1728 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1729 #else
1730 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1731 TREE_UNSIGNED (integer_type_node));
1732 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1733 #endif
1735 #ifdef TARGET_MEM_FUNCTIONS
1736 /* It is incorrect to use the libcall calling conventions to call
1737 memcpy in this context.
1739 This could be a user call to memcpy and the user may wish to
1740 examine the return value from memcpy.
1742 For targets where libcalls and normal calls have different conventions
1743 for returning pointers, we could end up generating incorrect code.
1745 So instead of using a libcall sequence we build up a suitable
1746 CALL_EXPR and expand the call in the normal fashion. */
1747 if (fn == NULL_TREE)
1749 tree fntype;
1751 /* This was copied from except.c, I don't know if all this is
1752 necessary in this context or not. */
1753 fn = get_identifier ("memcpy");
1754 fntype = build_pointer_type (void_type_node);
1755 fntype = build_function_type (fntype, NULL_TREE);
1756 fn = build_decl (FUNCTION_DECL, fn, fntype);
1757 ggc_add_tree_root (&fn, 1);
1758 DECL_EXTERNAL (fn) = 1;
1759 TREE_PUBLIC (fn) = 1;
1760 DECL_ARTIFICIAL (fn) = 1;
1761 make_decl_rtl (fn, NULL_PTR);
1762 assemble_external (fn);
1765 /* We need to make an argument list for the function call.
1767 memcpy has three arguments, the first two are void * addresses and
1768 the last is a size_t byte count for the copy. */
1769 arg_list
1770 = build_tree_list (NULL_TREE,
1771 make_tree (build_pointer_type (void_type_node), x));
1772 TREE_CHAIN (arg_list)
1773 = build_tree_list (NULL_TREE,
1774 make_tree (build_pointer_type (void_type_node), y));
1775 TREE_CHAIN (TREE_CHAIN (arg_list))
1776 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1777 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1779 /* Now we have to build up the CALL_EXPR itself. */
1780 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1781 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1782 call_expr, arg_list, NULL_TREE);
1783 TREE_SIDE_EFFECTS (call_expr) = 1;
1785 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1786 #else
1787 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1788 VOIDmode, 3, y, Pmode, x, Pmode,
1789 convert_to_mode (TYPE_MODE (integer_type_node), size,
1790 TREE_UNSIGNED (integer_type_node)),
1791 TYPE_MODE (integer_type_node));
1792 #endif
1795 return retval;
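
/* A minimal sketch of an emit_block_move call, assuming DST and SRC are
   BLKmode MEMs built by the caller: copy 64 bytes with word alignment.
   ALIGN is expressed in bits here, as the division by BITS_PER_UNIT
   above shows.  */
#if 0
  emit_block_move (dst, src, GEN_INT (64), BITS_PER_WORD);
#endif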
1798 /* Copy all or part of a value X into registers starting at REGNO.
1799 The number of registers to be filled is NREGS. */
1801 void
1802 move_block_to_reg (regno, x, nregs, mode)
1803 int regno;
1804 rtx x;
1805 int nregs;
1806 enum machine_mode mode;
1808 int i;
1809 #ifdef HAVE_load_multiple
1810 rtx pat;
1811 rtx last;
1812 #endif
1814 if (nregs == 0)
1815 return;
1817 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1818 x = validize_mem (force_const_mem (mode, x));
1820 /* See if the machine can do this with a load multiple insn. */
1821 #ifdef HAVE_load_multiple
1822 if (HAVE_load_multiple)
1824 last = get_last_insn ();
1825 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1826 GEN_INT (nregs));
1827 if (pat)
1829 emit_insn (pat);
1830 return;
1832 else
1833 delete_insns_since (last);
1835 #endif
1837 for (i = 0; i < nregs; i++)
1838 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1839 operand_subword_force (x, i, mode));
1842 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1843 The number of registers to be filled is NREGS. SIZE indicates the number
1844 of bytes in the object X. */
1846 void
1847 move_block_from_reg (regno, x, nregs, size)
1848 int regno;
1849 rtx x;
1850 int nregs;
1851 int size;
1853 int i;
1854 #ifdef HAVE_store_multiple
1855 rtx pat;
1856 rtx last;
1857 #endif
1858 enum machine_mode mode;
1860 if (nregs == 0)
1861 return;
1863 /* If SIZE is that of a mode no bigger than a word, just use that
1864 mode's store operation. */
1865 if (size <= UNITS_PER_WORD
1866 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1868 emit_move_insn (change_address (x, mode, NULL),
1869 gen_rtx_REG (mode, regno));
1870 return;
1873 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1874 to the left before storing to memory. Note that the previous test
1875 doesn't handle all cases (e.g. SIZE == 3). */
1876 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1878 rtx tem = operand_subword (x, 0, 1, BLKmode);
1879 rtx shift;
1881 if (tem == 0)
1882 abort ();
1884 shift = expand_shift (LSHIFT_EXPR, word_mode,
1885 gen_rtx_REG (word_mode, regno),
1886 build_int_2 ((UNITS_PER_WORD - size)
1887 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1888 emit_move_insn (tem, shift);
1889 return;
1892 /* See if the machine can do this with a store multiple insn. */
1893 #ifdef HAVE_store_multiple
1894 if (HAVE_store_multiple)
1896 last = get_last_insn ();
1897 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1898 GEN_INT (nregs));
1899 if (pat)
1901 emit_insn (pat);
1902 return;
1904 else
1905 delete_insns_since (last);
1907 #endif
1909 for (i = 0; i < nregs; i++)
1911 rtx tem = operand_subword (x, i, 1, BLKmode);
1913 if (tem == 0)
1914 abort ();
1916 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1920 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1921 registers represented by a PARALLEL. SSIZE represents the total size of
1922 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1923 SRC in bits. */
1924 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1925 the balance will be in what would be the low-order memory addresses, i.e.
1926 left justified for big endian, right justified for little endian. This
1927 happens to be true for the targets currently using this support. If this
1928 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1929 would be needed. */
1931 void
1932 emit_group_load (dst, orig_src, ssize, align)
1933 rtx dst, orig_src;
1934 unsigned int align;
1935 int ssize;
1937 rtx *tmps, src;
1938 int start, i;
1940 if (GET_CODE (dst) != PARALLEL)
1941 abort ();
1943 /* Check for a NULL entry, used to indicate that the parameter goes
1944 both on the stack and in registers. */
1945 if (XEXP (XVECEXP (dst, 0, 0), 0))
1946 start = 0;
1947 else
1948 start = 1;
1950 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1952 /* If we won't be loading directly from memory, protect the real source
1953 from strange tricks we might play. */
1954 src = orig_src;
1955 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1957 if (GET_MODE (src) == VOIDmode)
1958 src = gen_reg_rtx (GET_MODE (dst));
1959 else
1960 src = gen_reg_rtx (GET_MODE (orig_src));
1961 emit_move_insn (src, orig_src);
1964 /* Process the pieces. */
1965 for (i = start; i < XVECLEN (dst, 0); i++)
1967 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1968 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1969 unsigned int bytelen = GET_MODE_SIZE (mode);
1970 int shift = 0;
1972 /* Handle trailing fragments that run over the size of the struct. */
1973 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1975 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1976 bytelen = ssize - bytepos;
1977 if (bytelen <= 0)
1978 abort ();
1981 /* Optimize the access just a bit. */
1982 if (GET_CODE (src) == MEM
1983 && align >= GET_MODE_ALIGNMENT (mode)
1984 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1985 && bytelen == GET_MODE_SIZE (mode))
1987 tmps[i] = gen_reg_rtx (mode);
1988 emit_move_insn (tmps[i],
1989 change_address (src, mode,
1990 plus_constant (XEXP (src, 0),
1991 bytepos)));
1993 else if (GET_CODE (src) == CONCAT)
1995 if (bytepos == 0
1996 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1997 tmps[i] = XEXP (src, 0);
1998 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1999 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2000 tmps[i] = XEXP (src, 1);
2001 else
2002 abort ();
2004 else if ((CONSTANT_P (src)
2005 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2006 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2007 tmps[i] = src;
2008 else
2009 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2010 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2011 mode, mode, align, ssize);
2013 if (BYTES_BIG_ENDIAN && shift)
2014 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2015 tmps[i], 0, OPTAB_WIDEN);
2018 emit_queue ();
2020 /* Copy the extracted pieces into the proper (probable) hard regs. */
2021 for (i = start; i < XVECLEN (dst, 0); i++)
2022 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2025 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2026 registers represented by a PARALLEL. SSIZE represents the total size of
2027 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2029 void
2030 emit_group_store (orig_dst, src, ssize, align)
2031 rtx orig_dst, src;
2032 int ssize;
2033 unsigned int align;
2035 rtx *tmps, dst;
2036 int start, i;
2038 if (GET_CODE (src) != PARALLEL)
2039 abort ();
2041 /* Check for a NULL entry, used to indicate that the parameter goes
2042 both on the stack and in registers. */
2043 if (XEXP (XVECEXP (src, 0, 0), 0))
2044 start = 0;
2045 else
2046 start = 1;
2048 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2050 /* Copy the (probable) hard regs into pseudos. */
2051 for (i = start; i < XVECLEN (src, 0); i++)
2053 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2054 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2055 emit_move_insn (tmps[i], reg);
2057 emit_queue ();
2059 /* If we won't be storing directly into memory, protect the real destination
2060 from strange tricks we might play. */
2061 dst = orig_dst;
2062 if (GET_CODE (dst) == PARALLEL)
2064 rtx temp;
2066 /* We can get a PARALLEL dst if there is a conditional expression in
2067 a return statement. In that case, the dst and src are the same,
2068 so no action is necessary. */
2069 if (rtx_equal_p (dst, src))
2070 return;
2072 /* It is unclear if we can ever reach here, but we may as well handle
2073 it. Allocate a temporary, and split this into a store/load to/from
2074 the temporary. */
2076 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2077 emit_group_store (temp, src, ssize, align);
2078 emit_group_load (dst, temp, ssize, align);
2079 return;
2081 else if (GET_CODE (dst) != MEM)
2083 dst = gen_reg_rtx (GET_MODE (orig_dst));
2084 /* Make life a bit easier for combine. */
2085 emit_move_insn (dst, const0_rtx);
2088 /* Process the pieces. */
2089 for (i = start; i < XVECLEN (src, 0); i++)
2091 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2092 enum machine_mode mode = GET_MODE (tmps[i]);
2093 unsigned int bytelen = GET_MODE_SIZE (mode);
2095 /* Handle trailing fragments that run over the size of the struct. */
2096 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2098 if (BYTES_BIG_ENDIAN)
2100 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2101 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2102 tmps[i], 0, OPTAB_WIDEN);
2104 bytelen = ssize - bytepos;
2107 /* Optimize the access just a bit. */
2108 if (GET_CODE (dst) == MEM
2109 && align >= GET_MODE_ALIGNMENT (mode)
2110 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2111 && bytelen == GET_MODE_SIZE (mode))
2112 emit_move_insn (change_address (dst, mode,
2113 plus_constant (XEXP (dst, 0),
2114 bytepos)),
2115 tmps[i]);
2116 else
2117 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2118 mode, tmps[i], align, ssize);
2121 emit_queue ();
2123 /* Copy from the pseudo into the (probable) hard reg. */
2124 if (GET_CODE (dst) == REG)
2125 emit_move_insn (orig_dst, dst);
2128 /* Generate code to copy a BLKmode object of TYPE out of a
2129 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2130 is null, a stack temporary is created. TGTBLK is returned.
2132 The primary purpose of this routine is to handle functions
2133 that return BLKmode structures in registers. Some machines
2134 (the PA for example) want to return all small structures
2135 in registers regardless of the structure's alignment. */
2138 copy_blkmode_from_reg (tgtblk, srcreg, type)
2139 rtx tgtblk;
2140 rtx srcreg;
2141 tree type;
2143 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2144 rtx src = NULL, dst = NULL;
2145 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2146 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2148 if (tgtblk == 0)
2150 tgtblk = assign_temp (build_qualified_type (type,
2151 (TYPE_QUALS (type)
2152 | TYPE_QUAL_CONST)),
2153 0, 1, 1);
2154 preserve_temp_slots (tgtblk);
2157 /* This code assumes srcreg is at least a full word. If it isn't,
2158 copy it into a new pseudo which is a full word. */
2159 if (GET_MODE (srcreg) != BLKmode
2160 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2161 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2163 /* Structures whose size is not a multiple of a word are aligned
2164 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2165 machine, this means we must skip the empty high order bytes when
2166 calculating the bit offset. */
2167 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2168 big_endian_correction
2169 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
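/* For example, a 6-byte structure with 4-byte words and a 32-bit
   BITS_PER_WORD gives bytes % UNITS_PER_WORD == 2, so the correction is
   32 - 2 * BITS_PER_UNIT == 16 bits, skipping the two empty high-order
   bytes when extracting from SRCREG.  */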
2171 /* Copy the structure BITSIZE bits at a time.
2173 We could probably emit more efficient code for machines which do not use
2174 strict alignment, but it doesn't seem worth the effort at the current
2175 time. */
2176 for (bitpos = 0, xbitpos = big_endian_correction;
2177 bitpos < bytes * BITS_PER_UNIT;
2178 bitpos += bitsize, xbitpos += bitsize)
2180 /* We need a new source operand each time xbitpos is on a
2181 word boundary and when xbitpos == big_endian_correction
2182 (the first time through). */
2183 if (xbitpos % BITS_PER_WORD == 0
2184 || xbitpos == big_endian_correction)
2185 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2187 /* We need a new destination operand each time bitpos is on
2188 a word boundary. */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2192 /* Use xbitpos for the source extraction (right justified) and
2193 bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
2198 bitsize, BITS_PER_WORD),
2199 bitsize, BITS_PER_WORD);
2202 return tgtblk;
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2208 void
2209 use_reg (call_fusage, reg)
2210 rtx *call_fusage, reg;
2212 if (GET_CODE (reg) != REG
2213 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2214 abort ();
2216 *call_fusage
2217 = gen_rtx_EXPR_LIST (VOIDmode,
2218 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2221 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2222 starting at REGNO. All of these registers must be hard registers. */
2224 void
2225 use_regs (call_fusage, regno, nregs)
2226 rtx *call_fusage;
2227 int regno;
2228 int nregs;
2230 int i;
2232 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2233 abort ();
2235 for (i = 0; i < nregs; i++)
2236 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2239 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2240 PARALLEL REGS. This is for calls that pass values in multiple
2241 non-contiguous locations. The Irix 6 ABI has examples of this. */
2243 void
2244 use_group_regs (call_fusage, regs)
2245 rtx *call_fusage;
2246 rtx regs;
2248 int i;
2250 for (i = 0; i < XVECLEN (regs, 0); i++)
2252 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2254 /* A NULL entry means the parameter goes both on the stack and in
2255 registers. This can also be a MEM for targets that pass values
2256 partially on the stack and partially in registers. */
2257 if (reg != 0 && GET_CODE (reg) == REG)
2258 use_reg (call_fusage, reg);
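/* Determine whether the LEN bytes generated by CONSTFUN can be stored to
   memory using several move instructions.  CONSTFUNDATA is a pointer to
   be passed in every CONSTFUN call; ALIGN is the maximum alignment we can
   assume.  Return nonzero only if MOVE_BY_PIECES_P allows a piecewise
   store of LEN bytes and every piece that would be stored is a
   legitimate constant.  */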
2264 can_store_by_pieces (len, constfun, constfundata, align)
2265 unsigned HOST_WIDE_INT len;
2266 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2267 PTR constfundata;
2268 unsigned int align;
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2274 int reverse;
2275 rtx cst;
2277 if (! MOVE_BY_PIECES_P (len, align))
2278 return 0;
2280 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2281 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2282 align = MOVE_MAX * BITS_PER_UNIT;
2284 /* We would first store what we can in the largest integer mode, then go to
2285 successively smaller modes. */
2287 for (reverse = 0;
2288 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2289 reverse++)
2291 l = len;
2292 mode = VOIDmode;
2293 max_size = MOVE_MAX_PIECES + 1;
2294 while (max_size > 1)
2296 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2297 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2298 if (GET_MODE_SIZE (tmode) < max_size)
2299 mode = tmode;
2301 if (mode == VOIDmode)
2302 break;
2304 icode = mov_optab->handlers[(int) mode].insn_code;
2305 if (icode != CODE_FOR_nothing
2306 && align >= GET_MODE_ALIGNMENT (mode))
2308 unsigned int size = GET_MODE_SIZE (mode);
2310 while (l >= size)
2312 if (reverse)
2313 offset -= size;
2315 cst = (*constfun) (constfundata, offset, mode);
2316 if (!LEGITIMATE_CONSTANT_P (cst))
2317 return 0;
2319 if (!reverse)
2320 offset += size;
2322 l -= size;
2326 max_size = GET_MODE_SIZE (mode);
2329 /* The code above should have handled everything. */
2330 if (l != 0)
2331 abort ();
2334 return 1;
2337 /* Generate several move instructions to store LEN bytes generated by
2338 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2339 pointer which will be passed as argument in every CONSTFUN call.
2340 ALIGN is maximum alignment we can assume. */
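/* Note that this routine aborts when MOVE_BY_PIECES_P rejects LEN and
   ALIGN, so a caller that is not already sure a piecewise store is
   possible will normally have checked can_store_by_pieces first.  */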
2342 void
2343 store_by_pieces (to, len, constfun, constfundata, align)
2344 rtx to;
2345 unsigned HOST_WIDE_INT len;
2346 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2347 PTR constfundata;
2348 unsigned int align;
2350 struct store_by_pieces data;
2352 if (! MOVE_BY_PIECES_P (len, align))
2353 abort ();
2354 to = protect_from_queue (to, 1);
2355 data.constfun = constfun;
2356 data.constfundata = constfundata;
2357 data.len = len;
2358 data.to = to;
2359 store_by_pieces_1 (&data, align);
2362 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2363 rtx with BLKmode). The caller must pass TO through protect_from_queue
2364 before calling. ALIGN is maximum alignment we can assume. */
2366 static void
2367 clear_by_pieces (to, len, align)
2368 rtx to;
2369 unsigned HOST_WIDE_INT len;
2370 unsigned int align;
2372 struct store_by_pieces data;
2374 data.constfun = clear_by_pieces_1;
2375 data.constfundata = NULL_PTR;
2376 data.len = len;
2377 data.to = to;
2378 store_by_pieces_1 (&data, align);
2381 /* Callback routine for clear_by_pieces.
2382 Return const0_rtx unconditionally. */
2384 static rtx
2385 clear_by_pieces_1 (data, offset, mode)
2386 PTR data ATTRIBUTE_UNUSED;
2387 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2388 enum machine_mode mode ATTRIBUTE_UNUSED;
2390 return const0_rtx;
2393 /* Subroutine of clear_by_pieces and store_by_pieces.
2394 Generate several move instructions to store LEN bytes of block TO. (A MEM
2395 rtx with BLKmode). The caller must pass TO through protect_from_queue
2396 before calling. ALIGN is maximum alignment we can assume. */
2398 static void
2399 store_by_pieces_1 (data, align)
2400 struct store_by_pieces *data;
2401 unsigned int align;
2403 rtx to_addr = XEXP (data->to, 0);
2404 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2405 enum machine_mode mode = VOIDmode, tmode;
2406 enum insn_code icode;
2408 data->offset = 0;
2409 data->to_addr = to_addr;
2410 data->autinc_to
2411 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2412 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2414 data->explicit_inc_to = 0;
2415 data->reverse
2416 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2417 if (data->reverse)
2418 data->offset = data->len;
2420 /* If storing requires more than two move insns,
2421 copy addresses to registers (to make displacements shorter)
2422 and use post-increment if available. */
2423 if (!data->autinc_to
2424 && move_by_pieces_ninsns (data->len, align) > 2)
2426 /* Determine the main mode we'll be using. */
2427 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2428 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2429 if (GET_MODE_SIZE (tmode) < max_size)
2430 mode = tmode;
2432 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2434 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2435 data->autinc_to = 1;
2436 data->explicit_inc_to = -1;
2439 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2440 && ! data->autinc_to)
2442 data->to_addr = copy_addr_to_reg (to_addr);
2443 data->autinc_to = 1;
2444 data->explicit_inc_to = 1;
2447 if ( !data->autinc_to && CONSTANT_P (to_addr))
2448 data->to_addr = copy_addr_to_reg (to_addr);
2451 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2452 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2453 align = MOVE_MAX * BITS_PER_UNIT;
2455 /* First store what we can in the largest integer mode, then go to
2456 successively smaller modes. */
2458 while (max_size > 1)
2460 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2461 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2462 if (GET_MODE_SIZE (tmode) < max_size)
2463 mode = tmode;
2465 if (mode == VOIDmode)
2466 break;
2468 icode = mov_optab->handlers[(int) mode].insn_code;
2469 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2470 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2472 max_size = GET_MODE_SIZE (mode);
2475 /* The code above should have handled everything. */
2476 if (data->len != 0)
2477 abort ();
2480 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2481 with move instructions for mode MODE. GENFUN is the gen_... function
2482 to make a move insn for that mode. DATA has all the other info. */
2484 static void
2485 store_by_pieces_2 (genfun, mode, data)
2486 rtx (*genfun) PARAMS ((rtx, ...));
2487 enum machine_mode mode;
2488 struct store_by_pieces *data;
2490 unsigned int size = GET_MODE_SIZE (mode);
2491 rtx to1, cst;
2493 while (data->len >= size)
2495 if (data->reverse)
2496 data->offset -= size;
2498 if (data->autinc_to)
2500 to1 = gen_rtx_MEM (mode, data->to_addr);
2501 MEM_COPY_ATTRIBUTES (to1, data->to);
2503 else
2504 to1 = change_address (data->to, mode,
2505 plus_constant (data->to_addr, data->offset));
2507 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2508 emit_insn (gen_add2_insn (data->to_addr,
2509 GEN_INT (-(HOST_WIDE_INT) size)));
2511 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2512 emit_insn ((*genfun) (to1, cst));
2514 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2515 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2517 if (! data->reverse)
2518 data->offset += size;
2520 data->len -= size;
2524 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2525 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2527 If we call a function that returns the length of the block, return it. */
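/* Sketch of a typical use (the exact arguments depend on the caller):
   clear_storage (target, GEN_INT (size_in_bytes), align_in_bits).
   Objects that are not BLKmode and whose size matches their mode are
   simply set to CONST0_RTX of that mode below.  */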
2530 clear_storage (object, size, align)
2531 rtx object;
2532 rtx size;
2533 unsigned int align;
2535 #ifdef TARGET_MEM_FUNCTIONS
2536 static tree fn;
2537 tree call_expr, arg_list;
2538 #endif
2539 rtx retval = 0;
2541 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2542 just move a zero. Otherwise, do this a piece at a time. */
2543 if (GET_MODE (object) != BLKmode
2544 && GET_CODE (size) == CONST_INT
2545 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2546 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2547 else
2549 object = protect_from_queue (object, 1);
2550 size = protect_from_queue (size, 0);
2552 if (GET_CODE (size) == CONST_INT
2553 && MOVE_BY_PIECES_P (INTVAL (size), align))
2554 clear_by_pieces (object, INTVAL (size), align);
2555 else
2557 /* Try the most limited insn first, because there's no point
2558 including more than one in the machine description unless
2559 the more limited one has some advantage. */
2561 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2562 enum machine_mode mode;
2564 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2565 mode = GET_MODE_WIDER_MODE (mode))
2567 enum insn_code code = clrstr_optab[(int) mode];
2568 insn_operand_predicate_fn pred;
2570 if (code != CODE_FOR_nothing
2571 /* We don't need MODE to be narrower than
2572 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2573 the mode mask, as it is returned by the macro, it will
2574 definitely be less than the actual mode mask. */
2575 && ((GET_CODE (size) == CONST_INT
2576 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2577 <= (GET_MODE_MASK (mode) >> 1)))
2578 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2579 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2580 || (*pred) (object, BLKmode))
2581 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2582 || (*pred) (opalign, VOIDmode)))
2584 rtx op1;
2585 rtx last = get_last_insn ();
2586 rtx pat;
2588 op1 = convert_to_mode (mode, size, 1);
2589 pred = insn_data[(int) code].operand[1].predicate;
2590 if (pred != 0 && ! (*pred) (op1, mode))
2591 op1 = copy_to_mode_reg (mode, op1);
2593 pat = GEN_FCN ((int) code) (object, op1, opalign);
2594 if (pat)
2596 emit_insn (pat);
2597 return 0;
2599 else
2600 delete_insns_since (last);
2604 /* OBJECT or SIZE may have been passed through protect_from_queue.
2606 It is unsafe to save the value generated by protect_from_queue
2607 and reuse it later. Consider what happens if emit_queue is
2608 called before the return value from protect_from_queue is used.
2610 Expansion of the CALL_EXPR below will call emit_queue before
2611 we are finished emitting RTL for argument setup. So if we are
2612 not careful we could get the wrong value for an argument.
2614 To avoid this problem we go ahead and emit code to copy OBJECT
2615 and SIZE into new pseudos. We can then place those new pseudos
2616 into an RTL_EXPR and use them later, even after a call to
2617 emit_queue.
2619 Note this is not strictly needed for library calls since they
2620 do not call emit_queue before loading their arguments. However,
2621 we may need to have library calls call emit_queue in the future
2622 since failing to do so could cause problems for targets which
2623 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2624 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2626 #ifdef TARGET_MEM_FUNCTIONS
2627 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2628 #else
2629 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2630 TREE_UNSIGNED (integer_type_node));
2631 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2632 #endif
2634 #ifdef TARGET_MEM_FUNCTIONS
2635 /* It is incorrect to use the libcall calling conventions to call
2636 memset in this context.
2638 This could be a user call to memset and the user may wish to
2639 examine the return value from memset.
2641 For targets where libcalls and normal calls have different
2642 conventions for returning pointers, we could end up generating
2643 incorrect code.
2645 So instead of using a libcall sequence we build up a suitable
2646 CALL_EXPR and expand the call in the normal fashion. */
2647 if (fn == NULL_TREE)
2649 tree fntype;
2651 /* This was copied from except.c; I don't know whether all of it is
2652 necessary in this context. */
2653 fn = get_identifier ("memset");
2654 fntype = build_pointer_type (void_type_node);
2655 fntype = build_function_type (fntype, NULL_TREE);
2656 fn = build_decl (FUNCTION_DECL, fn, fntype);
2657 ggc_add_tree_root (&fn, 1);
2658 DECL_EXTERNAL (fn) = 1;
2659 TREE_PUBLIC (fn) = 1;
2660 DECL_ARTIFICIAL (fn) = 1;
2661 make_decl_rtl (fn, NULL_PTR);
2662 assemble_external (fn);
2665 /* We need to make an argument list for the function call.
2667 memset has three arguments: the first is a void * address, the
2668 second an integer with the initialization value, and the last is a
2669 size_t byte count. */
2670 arg_list
2671 = build_tree_list (NULL_TREE,
2672 make_tree (build_pointer_type (void_type_node),
2673 object));
2674 TREE_CHAIN (arg_list)
2675 = build_tree_list (NULL_TREE,
2676 make_tree (integer_type_node, const0_rtx));
2677 TREE_CHAIN (TREE_CHAIN (arg_list))
2678 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2679 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2681 /* Now we have to build up the CALL_EXPR itself. */
2682 call_expr = build1 (ADDR_EXPR,
2683 build_pointer_type (TREE_TYPE (fn)), fn);
2684 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2685 call_expr, arg_list, NULL_TREE);
2686 TREE_SIDE_EFFECTS (call_expr) = 1;
2688 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2689 #else
2690 emit_library_call (bzero_libfunc, LCT_NORMAL,
2691 VOIDmode, 2, object, Pmode, size,
2692 TYPE_MODE (integer_type_node));
2693 #endif
2697 return retval;
2700 /* Generate code to copy Y into X.
2701 Both Y and X must have the same mode, except that
2702 Y can be a constant with VOIDmode.
2703 This mode cannot be BLKmode; use emit_block_move for that.
2705 Return the last instruction emitted. */
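/* For example, emit_move_insn (reg, const0_rtx) stores zero into REG.
   A constant Y that is not LEGITIMATE_CONSTANT_P for the target is first
   forced into the constant pool via force_const_mem below.  */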
2708 emit_move_insn (x, y)
2709 rtx x, y;
2711 enum machine_mode mode = GET_MODE (x);
2713 x = protect_from_queue (x, 1);
2714 y = protect_from_queue (y, 0);
2716 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2717 abort ();
2719 /* Never force constant_p_rtx to memory. */
2720 if (GET_CODE (y) == CONSTANT_P_RTX)
2722 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2723 y = force_const_mem (mode, y);
2725 /* If X or Y are memory references, verify that their addresses are valid
2726 for the machine. */
2727 if (GET_CODE (x) == MEM
2728 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2729 && ! push_operand (x, GET_MODE (x)))
2730 || (flag_force_addr
2731 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2732 x = change_address (x, VOIDmode, XEXP (x, 0));
2734 if (GET_CODE (y) == MEM
2735 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2736 || (flag_force_addr
2737 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2738 y = change_address (y, VOIDmode, XEXP (y, 0));
2740 if (mode == BLKmode)
2741 abort ();
2743 return emit_move_insn_1 (x, y);
2746 /* Low level part of emit_move_insn.
2747 Called just like emit_move_insn, but assumes X and Y
2748 are basically valid. */
2751 emit_move_insn_1 (x, y)
2752 rtx x, y;
2754 enum machine_mode mode = GET_MODE (x);
2755 enum machine_mode submode;
2756 enum mode_class class = GET_MODE_CLASS (mode);
2757 unsigned int i;
2759 if (mode >= MAX_MACHINE_MODE)
2760 abort ();
2762 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2763 return
2764 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2766 /* Expand complex moves by moving real part and imag part, if possible. */
2767 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2768 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2769 * BITS_PER_UNIT),
2770 (class == MODE_COMPLEX_INT
2771 ? MODE_INT : MODE_FLOAT),
2773 && (mov_optab->handlers[(int) submode].insn_code
2774 != CODE_FOR_nothing))
2776 /* Don't split destination if it is a stack push. */
2777 int stack = push_operand (x, GET_MODE (x));
2779 /* If this is a stack push, push the highpart first, so it
2780 will be in the argument order.
2782 In that case, change_address is used only to convert
2783 the mode, not to change the address. */
2784 if (stack)
2786 /* Note that the real part always precedes the imag part in memory
2787 regardless of machine's endianness. */
2788 #ifdef STACK_GROWS_DOWNWARD
2789 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2790 (gen_rtx_MEM (submode, XEXP (x, 0)),
2791 gen_imagpart (submode, y)));
2792 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2793 (gen_rtx_MEM (submode, XEXP (x, 0)),
2794 gen_realpart (submode, y)));
2795 #else
2796 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2797 (gen_rtx_MEM (submode, XEXP (x, 0)),
2798 gen_realpart (submode, y)));
2799 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2800 (gen_rtx_MEM (submode, XEXP (x, 0)),
2801 gen_imagpart (submode, y)));
2802 #endif
2804 else
2806 rtx realpart_x, realpart_y;
2807 rtx imagpart_x, imagpart_y;
2809 /* If this is a complex value with each part being smaller than a
2810 word, the usual calling sequence will likely pack the pieces into
2811 a single register. Unfortunately, SUBREG of hard registers only
2812 deals in terms of words, so we have a problem converting input
2813 arguments to the CONCAT of two registers that is used elsewhere
2814 for complex values. If this is before reload, we can copy it into
2815 memory and reload. FIXME, we should see about using extract and
2816 insert on integer registers, but complex short and complex char
2817 variables should be rarely used. */
2818 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2819 && (reload_in_progress | reload_completed) == 0)
2821 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2822 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2824 if (packed_dest_p || packed_src_p)
2826 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2827 ? MODE_FLOAT : MODE_INT);
2829 enum machine_mode reg_mode
2830 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2832 if (reg_mode != BLKmode)
2834 rtx mem = assign_stack_temp (reg_mode,
2835 GET_MODE_SIZE (mode), 0);
2836 rtx cmem = change_address (mem, mode, NULL_RTX);
2838 cfun->cannot_inline
2839 = N_("function using short complex types cannot be inline");
2841 if (packed_dest_p)
2843 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2844 emit_move_insn_1 (cmem, y);
2845 return emit_move_insn_1 (sreg, mem);
2847 else
2849 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2850 emit_move_insn_1 (mem, sreg);
2851 return emit_move_insn_1 (x, cmem);
2857 realpart_x = gen_realpart (submode, x);
2858 realpart_y = gen_realpart (submode, y);
2859 imagpart_x = gen_imagpart (submode, x);
2860 imagpart_y = gen_imagpart (submode, y);
2862 /* Show the output dies here. This is necessary for SUBREGs
2863 of pseudos since we cannot track their lifetimes correctly;
2864 hard regs shouldn't appear here except as return values.
2865 We never want to emit such a clobber after reload. */
2866 if (x != y
2867 && ! (reload_in_progress || reload_completed)
2868 && (GET_CODE (realpart_x) == SUBREG
2869 || GET_CODE (imagpart_x) == SUBREG))
2871 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (realpart_x, realpart_y));
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (imagpart_x, imagpart_y));
2880 return get_last_insn ();
2883 /* This will handle any multi-word mode that lacks a move_insn pattern.
2884 However, you will get better code if you define such patterns,
2885 even if they must turn into multiple assembler instructions. */
2886 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2888 rtx last_insn = 0;
2889 rtx seq, inner;
2890 int need_clobber;
2892 #ifdef PUSH_ROUNDING
2894 /* If X is a push on the stack, do the push now and replace
2895 X with a reference to the stack pointer. */
2896 if (push_operand (x, GET_MODE (x)))
2898 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2899 x = change_address (x, VOIDmode, stack_pointer_rtx);
2901 #endif
2903 /* If we are in reload, see if either operand is a MEM whose address
2904 is scheduled for replacement. */
2905 if (reload_in_progress && GET_CODE (x) == MEM
2906 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2908 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2910 MEM_COPY_ATTRIBUTES (new, x);
2911 x = new;
2913 if (reload_in_progress && GET_CODE (y) == MEM
2914 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2916 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2918 MEM_COPY_ATTRIBUTES (new, y);
2919 y = new;
2922 start_sequence ();
2924 need_clobber = 0;
2925 for (i = 0;
2926 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2927 i++)
2929 rtx xpart = operand_subword (x, i, 1, mode);
2930 rtx ypart = operand_subword (y, i, 1, mode);
2932 /* If we can't get a part of Y, put Y into memory if it is a
2933 constant. Otherwise, force it into a register. If we still
2934 can't get a part of Y, abort. */
2935 if (ypart == 0 && CONSTANT_P (y))
2937 y = force_const_mem (mode, y);
2938 ypart = operand_subword (y, i, 1, mode);
2940 else if (ypart == 0)
2941 ypart = operand_subword_force (y, i, mode);
2943 if (xpart == 0 || ypart == 0)
2944 abort ();
2946 need_clobber |= (GET_CODE (xpart) == SUBREG);
2948 last_insn = emit_move_insn (xpart, ypart);
2951 seq = gen_sequence ();
2952 end_sequence ();
2954 /* Show the output dies here. This is necessary for SUBREGs
2955 of pseudos since we cannot track their lifetimes correctly;
2956 hard regs shouldn't appear here except as return values.
2957 We never want to emit such a clobber after reload. */
2958 if (x != y
2959 && ! (reload_in_progress || reload_completed)
2960 && need_clobber != 0)
2962 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2965 emit_insn (seq);
2967 return last_insn;
2969 else
2970 abort ();
2973 /* Pushing data onto the stack. */
2975 /* Push a block of length SIZE (perhaps variable)
2976 and return an rtx to address the beginning of the block.
2977 Note that it is not possible for the value returned to be a QUEUED.
2978 The value may be virtual_outgoing_args_rtx.
2980 EXTRA is the number of bytes of padding to push in addition to SIZE.
2981 BELOW nonzero means this padding comes at low addresses;
2982 otherwise, the padding comes at high addresses. */
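/* Sketch of a typical use (the caller's values will differ):
   push_block (GEN_INT (16), 0, 0) adjusts the stack by 16 bytes and
   returns an address for the start of that block, which the caller can
   wrap in a BLKmode MEM.  */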
2985 push_block (size, extra, below)
2986 rtx size;
2987 int extra, below;
2989 register rtx temp;
2991 size = convert_modes (Pmode, ptr_mode, size, 1);
2992 if (CONSTANT_P (size))
2993 anti_adjust_stack (plus_constant (size, extra));
2994 else if (GET_CODE (size) == REG && extra == 0)
2995 anti_adjust_stack (size);
2996 else
2998 temp = copy_to_mode_reg (Pmode, size);
2999 if (extra != 0)
3000 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3001 temp, 0, OPTAB_LIB_WIDEN);
3002 anti_adjust_stack (temp);
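/* The preprocessor tangle below reduces to "if (1)" when the stack grows
   downward, to "if (!ACCUMULATE_OUTGOING_ARGS)" when only the arguments
   grow downward, and to "if (0)" otherwise.  */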
3005 #ifndef STACK_GROWS_DOWNWARD
3006 #ifdef ARGS_GROW_DOWNWARD
3007 if (!ACCUMULATE_OUTGOING_ARGS)
3008 #else
3009 if (0)
3010 #endif
3011 #else
3012 if (1)
3013 #endif
3015 /* Return the lowest stack address when STACK or ARGS grow downward and
3016 we are not accumulating outgoing arguments (the c4x port uses such
3017 conventions). */
3018 temp = virtual_outgoing_args_rtx;
3019 if (extra != 0 && below)
3020 temp = plus_constant (temp, extra);
3022 else
3024 if (GET_CODE (size) == CONST_INT)
3025 temp = plus_constant (virtual_outgoing_args_rtx,
3026 -INTVAL (size) - (below ? 0 : extra));
3027 else if (extra != 0 && !below)
3028 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3029 negate_rtx (Pmode, plus_constant (size, extra)));
3030 else
3031 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3032 negate_rtx (Pmode, size));
3035 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3039 gen_push_operand ()
3041 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3044 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3045 block of SIZE bytes. */
3047 static rtx
3048 get_push_address (size)
3049 int size;
3051 register rtx temp;
3053 if (STACK_PUSH_CODE == POST_DEC)
3054 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3055 else if (STACK_PUSH_CODE == POST_INC)
3056 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3057 else
3058 temp = stack_pointer_rtx;
3060 return copy_to_reg (temp);
3063 /* Generate code to push X onto the stack, assuming it has mode MODE and
3064 type TYPE.
3065 MODE is redundant except when X is a CONST_INT (since they don't
3066 carry mode info).
3067 SIZE is an rtx for the size of data to be copied (in bytes),
3068 needed only if X is BLKmode.
3070 ALIGN is maximum alignment we can assume.
3072 If PARTIAL and REG are both nonzero, then copy that many of the first
3073 words of X into registers starting with REG, and push the rest of X.
3074 The amount of space pushed is decreased by PARTIAL words,
3075 rounded *down* to a multiple of PARM_BOUNDARY.
3076 REG must be a hard register in this case.
3077 If REG is zero but PARTIAL is not, take all other actions for an
3078 argument partially in registers, but do not actually load any
3079 registers.
3081 EXTRA is the amount in bytes of extra space to leave next to this arg.
3082 This is ignored if an argument block has already been allocated.
3084 On a machine that lacks real push insns, ARGS_ADDR is the address of
3085 the bottom of the argument block for this call. We use indexing off there
3086 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3087 argument block has not been preallocated.
3089 ARGS_SO_FAR is the size of args previously pushed for this call.
3091 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3092 for arguments passed in registers. If nonzero, it will be the number
3093 of bytes required. */
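/* For instance, with PARTIAL == 2 and REG a hard register, the first two
   words of X are loaded into registers starting at REG and only the rest
   of X is pushed; the stack space used is reduced by those two words,
   rounded down to a multiple of PARM_BOUNDARY.  */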
3095 void
3096 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3097 args_addr, args_so_far, reg_parm_stack_space,
3098 alignment_pad)
3099 register rtx x;
3100 enum machine_mode mode;
3101 tree type;
3102 rtx size;
3103 unsigned int align;
3104 int partial;
3105 rtx reg;
3106 int extra;
3107 rtx args_addr;
3108 rtx args_so_far;
3109 int reg_parm_stack_space;
3110 rtx alignment_pad;
3112 rtx xinner;
3113 enum direction stack_direction
3114 #ifdef STACK_GROWS_DOWNWARD
3115 = downward;
3116 #else
3117 = upward;
3118 #endif
3120 /* Decide where to pad the argument: `downward' for below,
3121 `upward' for above, or `none' for don't pad it.
3122 Default is below for small data on big-endian machines; else above. */
3123 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3125 /* Invert direction if stack is post-update. */
3126 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3127 if (where_pad != none)
3128 where_pad = (where_pad == downward ? upward : downward);
3130 xinner = x = protect_from_queue (x, 0);
3132 if (mode == BLKmode)
3134 /* Copy a block into the stack, entirely or partially. */
3136 register rtx temp;
3137 int used = partial * UNITS_PER_WORD;
3138 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3139 int skip;
3141 if (size == 0)
3142 abort ();
3144 used -= offset;
3146 /* USED is now the # of bytes we need not copy to the stack
3147 because registers will take care of them. */
3149 if (partial != 0)
3150 xinner = change_address (xinner, BLKmode,
3151 plus_constant (XEXP (xinner, 0), used));
3153 /* If the partial register-part of the arg counts in its stack size,
3154 skip the part of stack space corresponding to the registers.
3155 Otherwise, start copying to the beginning of the stack space,
3156 by setting SKIP to 0. */
3157 skip = (reg_parm_stack_space == 0) ? 0 : used;
3159 #ifdef PUSH_ROUNDING
3160 /* Do it with several push insns if that doesn't take lots of insns
3161 and if there is no difficulty with push insns that skip bytes
3162 on the stack for alignment purposes. */
3163 if (args_addr == 0
3164 && PUSH_ARGS
3165 && GET_CODE (size) == CONST_INT
3166 && skip == 0
3167 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3168 /* Here we avoid the case of a structure whose weak alignment
3169 forces many pushes of a small amount of data,
3170 and such small pushes do rounding that causes trouble. */
3171 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3172 || align >= BIGGEST_ALIGNMENT
3173 || PUSH_ROUNDING (align) == align)
3174 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3176 /* Push padding now if padding above and stack grows down,
3177 or if padding below and stack grows up.
3178 But if space already allocated, this has already been done. */
3179 if (extra && args_addr == 0
3180 && where_pad != none && where_pad != stack_direction)
3181 anti_adjust_stack (GEN_INT (extra));
3183 stack_pointer_delta += INTVAL (size) - used;
3184 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3185 INTVAL (size) - used, align);
3187 if (current_function_check_memory_usage && ! in_check_memory_usage)
3189 rtx temp;
3191 in_check_memory_usage = 1;
3192 temp = get_push_address (INTVAL (size) - used);
3193 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3194 emit_library_call (chkr_copy_bitmap_libfunc,
3195 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3196 Pmode, XEXP (xinner, 0), Pmode,
3197 GEN_INT (INTVAL (size) - used),
3198 TYPE_MODE (sizetype));
3199 else
3200 emit_library_call (chkr_set_right_libfunc,
3201 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3202 Pmode, GEN_INT (INTVAL (size) - used),
3203 TYPE_MODE (sizetype),
3204 GEN_INT (MEMORY_USE_RW),
3205 TYPE_MODE (integer_type_node));
3206 in_check_memory_usage = 0;
3209 else
3210 #endif /* PUSH_ROUNDING */
3212 rtx target;
3214 /* Otherwise make space on the stack and copy the data
3215 to the address of that space. */
3217 /* Deduct words put into registers from the size we must copy. */
3218 if (partial != 0)
3220 if (GET_CODE (size) == CONST_INT)
3221 size = GEN_INT (INTVAL (size) - used);
3222 else
3223 size = expand_binop (GET_MODE (size), sub_optab, size,
3224 GEN_INT (used), NULL_RTX, 0,
3225 OPTAB_LIB_WIDEN);
3228 /* Get the address of the stack space.
3229 In this case, we do not deal with EXTRA separately.
3230 A single stack adjust will do. */
3231 if (! args_addr)
3233 temp = push_block (size, extra, where_pad == downward);
3234 extra = 0;
3236 else if (GET_CODE (args_so_far) == CONST_INT)
3237 temp = memory_address (BLKmode,
3238 plus_constant (args_addr,
3239 skip + INTVAL (args_so_far)));
3240 else
3241 temp = memory_address (BLKmode,
3242 plus_constant (gen_rtx_PLUS (Pmode,
3243 args_addr,
3244 args_so_far),
3245 skip));
3246 if (current_function_check_memory_usage && ! in_check_memory_usage)
3248 in_check_memory_usage = 1;
3249 target = copy_to_reg (temp);
3250 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3251 emit_library_call (chkr_copy_bitmap_libfunc,
3252 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3253 target, Pmode,
3254 XEXP (xinner, 0), Pmode,
3255 size, TYPE_MODE (sizetype));
3256 else
3257 emit_library_call (chkr_set_right_libfunc,
3258 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3259 target, Pmode,
3260 size, TYPE_MODE (sizetype),
3261 GEN_INT (MEMORY_USE_RW),
3262 TYPE_MODE (integer_type_node));
3263 in_check_memory_usage = 0;
3266 target = gen_rtx_MEM (BLKmode, temp);
3268 if (type != 0)
3270 set_mem_attributes (target, type, 1);
3271 /* Function incoming arguments may overlap with sibling call
3272 outgoing arguments and we cannot allow reordering of reads
3273 from function arguments with stores to outgoing arguments
3274 of sibling calls. */
3275 MEM_ALIAS_SET (target) = 0;
3278 /* TEMP is the address of the block. Copy the data there. */
3279 if (GET_CODE (size) == CONST_INT
3280 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3282 move_by_pieces (target, xinner, INTVAL (size), align);
3283 goto ret;
3285 else
3287 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3288 enum machine_mode mode;
3290 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3291 mode != VOIDmode;
3292 mode = GET_MODE_WIDER_MODE (mode))
3294 enum insn_code code = movstr_optab[(int) mode];
3295 insn_operand_predicate_fn pred;
3297 if (code != CODE_FOR_nothing
3298 && ((GET_CODE (size) == CONST_INT
3299 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3300 <= (GET_MODE_MASK (mode) >> 1)))
3301 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3302 && (!(pred = insn_data[(int) code].operand[0].predicate)
3303 || ((*pred) (target, BLKmode)))
3304 && (!(pred = insn_data[(int) code].operand[1].predicate)
3305 || ((*pred) (xinner, BLKmode)))
3306 && (!(pred = insn_data[(int) code].operand[3].predicate)
3307 || ((*pred) (opalign, VOIDmode))))
3309 rtx op2 = convert_to_mode (mode, size, 1);
3310 rtx last = get_last_insn ();
3311 rtx pat;
3313 pred = insn_data[(int) code].operand[2].predicate;
3314 if (pred != 0 && ! (*pred) (op2, mode))
3315 op2 = copy_to_mode_reg (mode, op2);
3317 pat = GEN_FCN ((int) code) (target, xinner,
3318 op2, opalign);
3319 if (pat)
3321 emit_insn (pat);
3322 goto ret;
3324 else
3325 delete_insns_since (last);
3330 if (!ACCUMULATE_OUTGOING_ARGS)
3332 /* If the source is referenced relative to the stack pointer,
3333 copy it to another register to stabilize it. We do not need
3334 to do this if we know that we won't be changing sp. */
3336 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3337 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3338 temp = copy_to_reg (temp);
3341 /* Make inhibit_defer_pop nonzero around the library call
3342 to force it to pop the bcopy-arguments right away. */
3343 NO_DEFER_POP;
3344 #ifdef TARGET_MEM_FUNCTIONS
3345 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3346 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3347 convert_to_mode (TYPE_MODE (sizetype),
3348 size, TREE_UNSIGNED (sizetype)),
3349 TYPE_MODE (sizetype));
3350 #else
3351 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3352 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3353 convert_to_mode (TYPE_MODE (integer_type_node),
3354 size,
3355 TREE_UNSIGNED (integer_type_node)),
3356 TYPE_MODE (integer_type_node));
3357 #endif
3358 OK_DEFER_POP;
3361 else if (partial > 0)
3363 /* Scalar partly in registers. */
3365 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3366 int i;
3367 int not_stack;
3368 /* # words of start of argument
3369 that we must make space for but need not store. */
3370 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3371 int args_offset = INTVAL (args_so_far);
3372 int skip;
3374 /* Push padding now if padding above and stack grows down,
3375 or if padding below and stack grows up.
3376 But if space already allocated, this has already been done. */
3377 if (extra && args_addr == 0
3378 && where_pad != none && where_pad != stack_direction)
3379 anti_adjust_stack (GEN_INT (extra));
3381 /* If we make space by pushing it, we might as well push
3382 the real data. Otherwise, we can leave OFFSET nonzero
3383 and leave the space uninitialized. */
3384 if (args_addr == 0)
3385 offset = 0;
3387 /* Now NOT_STACK gets the number of words that we don't need to
3388 allocate on the stack. */
3389 not_stack = partial - offset;
3391 /* If the partial register-part of the arg counts in its stack size,
3392 skip the part of stack space corresponding to the registers.
3393 Otherwise, start copying to the beginning of the stack space,
3394 by setting SKIP to 0. */
3395 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3397 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3398 x = validize_mem (force_const_mem (mode, x));
3400 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3401 SUBREGs of such registers are not allowed. */
3402 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3403 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3404 x = copy_to_reg (x);
3406 /* Loop over all the words allocated on the stack for this arg. */
3407 /* We can do it by words, because any scalar bigger than a word
3408 has a size that is a multiple of a word. */
3409 #ifndef PUSH_ARGS_REVERSED
3410 for (i = not_stack; i < size; i++)
3411 #else
3412 for (i = size - 1; i >= not_stack; i--)
3413 #endif
3414 if (i >= not_stack + offset)
3415 emit_push_insn (operand_subword_force (x, i, mode),
3416 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3417 0, args_addr,
3418 GEN_INT (args_offset + ((i - not_stack + skip)
3419 * UNITS_PER_WORD)),
3420 reg_parm_stack_space, alignment_pad);
3422 else
3424 rtx addr;
3425 rtx target = NULL_RTX;
3426 rtx dest;
3428 /* Push padding now if padding above and stack grows down,
3429 or if padding below and stack grows up.
3430 But if space already allocated, this has already been done. */
3431 if (extra && args_addr == 0
3432 && where_pad != none && where_pad != stack_direction)
3433 anti_adjust_stack (GEN_INT (extra));
3435 #ifdef PUSH_ROUNDING
3436 if (args_addr == 0 && PUSH_ARGS)
3438 addr = gen_push_operand ();
3439 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3441 else
3442 #endif
3444 if (GET_CODE (args_so_far) == CONST_INT)
3445 addr
3446 = memory_address (mode,
3447 plus_constant (args_addr,
3448 INTVAL (args_so_far)));
3449 else
3450 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3451 args_so_far));
3452 target = addr;
3455 dest = gen_rtx_MEM (mode, addr);
3456 if (type != 0)
3458 set_mem_attributes (dest, type, 1);
3459 /* Function incoming arguments may overlap with sibling call
3460 outgoing arguments and we cannot allow reordering of reads
3461 from function arguments with stores to outgoing arguments
3462 of sibling calls. */
3463 MEM_ALIAS_SET (dest) = 0;
3466 emit_move_insn (dest, x);
3468 if (current_function_check_memory_usage && ! in_check_memory_usage)
3470 in_check_memory_usage = 1;
3471 if (target == 0)
3472 target = get_push_address (GET_MODE_SIZE (mode));
3474 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3475 emit_library_call (chkr_copy_bitmap_libfunc,
3476 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3477 Pmode, XEXP (x, 0), Pmode,
3478 GEN_INT (GET_MODE_SIZE (mode)),
3479 TYPE_MODE (sizetype));
3480 else
3481 emit_library_call (chkr_set_right_libfunc,
3482 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3483 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3484 TYPE_MODE (sizetype),
3485 GEN_INT (MEMORY_USE_RW),
3486 TYPE_MODE (integer_type_node));
3487 in_check_memory_usage = 0;
3491 ret:
3492 /* If part should go in registers, copy that part
3493 into the appropriate registers. Do this now, at the end,
3494 since mem-to-mem copies above may do function calls. */
3495 if (partial > 0 && reg != 0)
3497 /* Handle calls that pass values in multiple non-contiguous locations.
3498 The Irix 6 ABI has examples of this. */
3499 if (GET_CODE (reg) == PARALLEL)
3500 emit_group_load (reg, x, -1, align); /* ??? size? */
3501 else
3502 move_block_to_reg (REGNO (reg), x, partial, mode);
3505 if (extra && args_addr == 0 && where_pad == stack_direction)
3506 anti_adjust_stack (GEN_INT (extra));
3508 if (alignment_pad && args_addr == 0)
3509 anti_adjust_stack (alignment_pad);
3512 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3513 operations. */
3515 static rtx
3516 get_subtarget (x)
3517 rtx x;
3519 return ((x == 0
3520 /* Only registers can be subtargets. */
3521 || GET_CODE (x) != REG
3522 /* If the register is readonly, it can't be set more than once. */
3523 || RTX_UNCHANGING_P (x)
3524 /* Don't use hard regs to avoid extending their life. */
3525 || REGNO (x) < FIRST_PSEUDO_REGISTER
3526 /* Avoid subtargets inside loops,
3527 since they hide some invariant expressions. */
3528 || preserve_subexpressions_p ())
3529 ? 0 : x);
3532 /* Expand an assignment that stores the value of FROM into TO.
3533 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3534 (This may contain a QUEUED rtx;
3535 if the value is constant, this rtx is a constant.)
3536 Otherwise, the returned value is NULL_RTX.
3538 SUGGEST_REG is no longer actually used.
3539 It used to mean, copy the value through a register
3540 and return that register, if that is possible.
3541 We now use WANT_VALUE to decide whether to do this. */
3544 expand_assignment (to, from, want_value, suggest_reg)
3545 tree to, from;
3546 int want_value;
3547 int suggest_reg ATTRIBUTE_UNUSED;
3549 register rtx to_rtx = 0;
3550 rtx result;
3552 /* Don't crash if the lhs of the assignment was erroneous. */
3554 if (TREE_CODE (to) == ERROR_MARK)
3556 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3557 return want_value ? result : NULL_RTX;
3560 /* Assignment of a structure component needs special treatment
3561 if the structure component's rtx is not simply a MEM.
3562 Assignment of an array element at a constant index, and assignment of
3563 an array element in an unaligned packed structure field, has the same
3564 problem. */
3566 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3567 || TREE_CODE (to) == ARRAY_REF)
3569 enum machine_mode mode1;
3570 HOST_WIDE_INT bitsize, bitpos;
3571 tree offset;
3572 int unsignedp;
3573 int volatilep = 0;
3574 tree tem;
3575 unsigned int alignment;
3577 push_temp_slots ();
3578 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3579 &unsignedp, &volatilep, &alignment);
3581 /* If we are going to use store_bit_field and extract_bit_field,
3582 make sure to_rtx will be safe for multiple use. */
3584 if (mode1 == VOIDmode && want_value)
3585 tem = stabilize_reference (tem);
3587 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3588 if (offset != 0)
3590 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3592 if (GET_CODE (to_rtx) != MEM)
3593 abort ();
3595 if (GET_MODE (offset_rtx) != ptr_mode)
3597 #ifdef POINTERS_EXTEND_UNSIGNED
3598 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3599 #else
3600 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3601 #endif
3604 /* A constant address in TO_RTX can have VOIDmode; we must not
3605 call force_reg for that case, so avoid it. */
3606 if (GET_CODE (to_rtx) == MEM
3607 && GET_MODE (to_rtx) == BLKmode
3608 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3609 && bitsize
3610 && (bitpos % bitsize) == 0
3611 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3612 && alignment == GET_MODE_ALIGNMENT (mode1))
3614 rtx temp = change_address (to_rtx, mode1,
3615 plus_constant (XEXP (to_rtx, 0),
3616 (bitpos /
3617 BITS_PER_UNIT)));
3618 if (GET_CODE (XEXP (temp, 0)) == REG)
3619 to_rtx = temp;
3620 else
3621 to_rtx = change_address (to_rtx, mode1,
3622 force_reg (GET_MODE (XEXP (temp, 0)),
3623 XEXP (temp, 0)));
3624 bitpos = 0;
3627 to_rtx = change_address (to_rtx, VOIDmode,
3628 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3629 force_reg (ptr_mode,
3630 offset_rtx)));
3633 if (volatilep)
3635 if (GET_CODE (to_rtx) == MEM)
3637 /* When the offset is zero, to_rtx is the address of the
3638 structure we are storing into, and hence may be shared.
3639 We must make a new MEM before setting the volatile bit. */
3640 if (offset == 0)
3641 to_rtx = copy_rtx (to_rtx);
3643 MEM_VOLATILE_P (to_rtx) = 1;
3645 #if 0 /* This was turned off because, when a field is volatile
3646 in an object which is not volatile, the object may be in a register,
3647 and then we would abort over here. */
3648 else
3649 abort ();
3650 #endif
3653 if (TREE_CODE (to) == COMPONENT_REF
3654 && TREE_READONLY (TREE_OPERAND (to, 1)))
3656 if (offset == 0)
3657 to_rtx = copy_rtx (to_rtx);
3659 RTX_UNCHANGING_P (to_rtx) = 1;
3662 /* Check the access. */
3663 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3665 rtx to_addr;
3666 int size;
3667 int best_mode_size;
3668 enum machine_mode best_mode;
3670 best_mode = get_best_mode (bitsize, bitpos,
3671 TYPE_ALIGN (TREE_TYPE (tem)),
3672 mode1, volatilep);
3673 if (best_mode == VOIDmode)
3674 best_mode = QImode;
3676 best_mode_size = GET_MODE_BITSIZE (best_mode);
3677 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3678 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3679 size *= GET_MODE_SIZE (best_mode);
3681 /* Check the access right of the pointer. */
3682 in_check_memory_usage = 1;
3683 if (size)
3684 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3685 VOIDmode, 3, to_addr, Pmode,
3686 GEN_INT (size), TYPE_MODE (sizetype),
3687 GEN_INT (MEMORY_USE_WO),
3688 TYPE_MODE (integer_type_node));
3689 in_check_memory_usage = 0;
3692 /* If this is a varying-length object, we must get the address of
3693 the source and do an explicit block move. */
3694 if (bitsize < 0)
3696 unsigned int from_align;
3697 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3698 rtx inner_to_rtx
3699 = change_address (to_rtx, VOIDmode,
3700 plus_constant (XEXP (to_rtx, 0),
3701 bitpos / BITS_PER_UNIT));
3703 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3704 MIN (alignment, from_align));
3705 free_temp_slots ();
3706 pop_temp_slots ();
3707 return to_rtx;
3709 else
3711 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3712 (want_value
3713 /* Spurious cast for HPUX compiler. */
3714 ? ((enum machine_mode)
3715 TYPE_MODE (TREE_TYPE (to)))
3716 : VOIDmode),
3717 unsignedp,
3718 alignment,
3719 int_size_in_bytes (TREE_TYPE (tem)),
3720 get_alias_set (to));
3722 preserve_temp_slots (result);
3723 free_temp_slots ();
3724 pop_temp_slots ();
3726 /* If the value is meaningful, convert RESULT to the proper mode.
3727 Otherwise, return nothing. */
3728 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3729 TYPE_MODE (TREE_TYPE (from)),
3730 result,
3731 TREE_UNSIGNED (TREE_TYPE (to)))
3732 : NULL_RTX);
3736 /* If the rhs is a function call and its value is not an aggregate,
3737 call the function before we start to compute the lhs.
3738 This is needed for correct code for cases such as
3739 val = setjmp (buf) on machines where reference to val
3740 requires loading up part of an address in a separate insn.
3742 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3743 since it might be a promoted variable where the zero- or sign- extension
3744 needs to be done. Handling this in the normal way is safe because no
3745 computation is done before the call. */
3746 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3747 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3748 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3749 && GET_CODE (DECL_RTL (to)) == REG))
3751 rtx value;
3753 push_temp_slots ();
3754 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3755 if (to_rtx == 0)
3756 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3758 /* Handle calls that return values in multiple non-contiguous locations.
3759 The Irix 6 ABI has examples of this. */
3760 if (GET_CODE (to_rtx) == PARALLEL)
3761 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3762 TYPE_ALIGN (TREE_TYPE (from)));
3763 else if (GET_MODE (to_rtx) == BLKmode)
3764 emit_block_move (to_rtx, value, expr_size (from),
3765 TYPE_ALIGN (TREE_TYPE (from)));
3766 else
3768 #ifdef POINTERS_EXTEND_UNSIGNED
3769 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3770 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3771 value = convert_memory_address (GET_MODE (to_rtx), value);
3772 #endif
3773 emit_move_insn (to_rtx, value);
3775 preserve_temp_slots (to_rtx);
3776 free_temp_slots ();
3777 pop_temp_slots ();
3778 return want_value ? to_rtx : NULL_RTX;
3781 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3782 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3784 if (to_rtx == 0)
3786 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3787 if (GET_CODE (to_rtx) == MEM)
3788 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3791 /* Don't move directly into a return register. */
3792 if (TREE_CODE (to) == RESULT_DECL
3793 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3795 rtx temp;
3797 push_temp_slots ();
3798 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3800 if (GET_CODE (to_rtx) == PARALLEL)
3801 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3802 TYPE_ALIGN (TREE_TYPE (from)));
3803 else
3804 emit_move_insn (to_rtx, temp);
3806 preserve_temp_slots (to_rtx);
3807 free_temp_slots ();
3808 pop_temp_slots ();
3809 return want_value ? to_rtx : NULL_RTX;
3812 /* In case we are returning the contents of an object which overlaps
3813 the place the value is being stored, use a safe function when copying
3814 a value through a pointer into a structure value return block. */
3815 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3816 && current_function_returns_struct
3817 && !current_function_returns_pcc_struct)
3819 rtx from_rtx, size;
3821 push_temp_slots ();
3822 size = expr_size (from);
3823 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3824 EXPAND_MEMORY_USE_DONT);
3826 /* Copy the rights of the bitmap. */
3827 if (current_function_check_memory_usage)
3828 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3829 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3830 XEXP (from_rtx, 0), Pmode,
3831 convert_to_mode (TYPE_MODE (sizetype),
3832 size, TREE_UNSIGNED (sizetype)),
3833 TYPE_MODE (sizetype));
3835 #ifdef TARGET_MEM_FUNCTIONS
3836 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3837 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3838 XEXP (from_rtx, 0), Pmode,
3839 convert_to_mode (TYPE_MODE (sizetype),
3840 size, TREE_UNSIGNED (sizetype)),
3841 TYPE_MODE (sizetype));
3842 #else
3843 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3844 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3845 XEXP (to_rtx, 0), Pmode,
3846 convert_to_mode (TYPE_MODE (integer_type_node),
3847 size, TREE_UNSIGNED (integer_type_node)),
3848 TYPE_MODE (integer_type_node));
3849 #endif
3851 preserve_temp_slots (to_rtx);
3852 free_temp_slots ();
3853 pop_temp_slots ();
3854 return want_value ? to_rtx : NULL_RTX;
3857 /* Compute FROM and store the value in the rtx we got. */
3859 push_temp_slots ();
3860 result = store_expr (from, to_rtx, want_value);
3861 preserve_temp_slots (result);
3862 free_temp_slots ();
3863 pop_temp_slots ();
3864 return want_value ? result : NULL_RTX;
3867 /* Generate code for computing expression EXP,
3868 and storing the value into TARGET.
3869 TARGET may contain a QUEUED rtx.
3871 If WANT_VALUE is nonzero, return a copy of the value
3872 not in TARGET, so that we can be sure to use the proper
3873 value in a containing expression even if TARGET has something
3874 else stored in it. If possible, we copy the value through a pseudo
3875 and return that pseudo. Or, if the value is constant, we try to
3876 return the constant. In some cases, we return a pseudo
3877 copied *from* TARGET.
3879 If the mode is BLKmode then we may return TARGET itself.
3880 It turns out that in BLKmode it doesn't cause a problem,
3881 because C has no operators that could combine two different
3882 assignments into the same BLKmode object with different values
3883 with no sequence point. Will other languages need this to
3884 be more thorough?
3886 If WANT_VALUE is 0, we return NULL, to make sure
3887 to catch quickly any cases where the caller uses the value
3888 and fails to set WANT_VALUE. */
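/* Illustrative sketch of the WANT_VALUE contract (not taken from a real
   caller): when expanding `a = b = c', the inner assignment is expanded
   with WANT_VALUE nonzero,

	temp = store_expr (rhs, target, 1);

   and the returned TEMP (often a pseudo or a constant) is what the
   outer assignment consumes, rather than re-reading TARGET.  With
   WANT_VALUE zero the result is NULL_RTX and must not be used.  */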
3890 rtx
3891 store_expr (exp, target, want_value)
3892 register tree exp;
3893 register rtx target;
3894 int want_value;
3896 register rtx temp;
3897 int dont_return_target = 0;
3899 if (TREE_CODE (exp) == COMPOUND_EXPR)
3901 /* Perform first part of compound expression, then assign from second
3902 part. */
3903 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3904 emit_queue ();
3905 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3907 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3909 /* For conditional expression, get safe form of the target. Then
3910 test the condition, doing the appropriate assignment on either
3911 side. This avoids the creation of unnecessary temporaries.
3912 For non-BLKmode, it is more efficient not to do this. */
3914 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3916 emit_queue ();
3917 target = protect_from_queue (target, 1);
3919 do_pending_stack_adjust ();
3920 NO_DEFER_POP;
3921 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3922 start_cleanup_deferral ();
3923 store_expr (TREE_OPERAND (exp, 1), target, 0);
3924 end_cleanup_deferral ();
3925 emit_queue ();
3926 emit_jump_insn (gen_jump (lab2));
3927 emit_barrier ();
3928 emit_label (lab1);
3929 start_cleanup_deferral ();
3930 store_expr (TREE_OPERAND (exp, 2), target, 0);
3931 end_cleanup_deferral ();
3932 emit_queue ();
3933 emit_label (lab2);
3934 OK_DEFER_POP;
3936 return want_value ? target : NULL_RTX;
3938 else if (queued_subexp_p (target))
3939 /* If target contains a postincrement, let's not risk
3940 using it as the place to generate the rhs. */
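/* E.g. (illustrative): for a store whose address carries a pending
   post-increment, such as

	*p++ = expr;

   TARGET still contains the queued increment of P, so EXP is expanded
   into a fresh pseudo (for a non-BLKmode target) rather than directly
   into TARGET.  */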
3942 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3944 /* Expand EXP into a new pseudo. */
3945 temp = gen_reg_rtx (GET_MODE (target));
3946 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3948 else
3949 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3951 /* If target is volatile, ANSI requires accessing the value
3952 *from* the target, if it is accessed. So make that happen.
3953 In no case return the target itself. */
3954 if (! MEM_VOLATILE_P (target) && want_value)
3955 dont_return_target = 1;
3957 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3958 && GET_MODE (target) != BLKmode)
3959 /* If target is in memory and caller wants value in a register instead,
3960 arrange that. Pass TARGET as target for expand_expr so that,
3961 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3962 We know expand_expr will not use the target in that case.
3963 Don't do this if TARGET is volatile because we are supposed
3964 to write it and then read it. */
3966 temp = expand_expr (exp, target, GET_MODE (target), 0);
3967 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3968 temp = copy_to_reg (temp);
3969 dont_return_target = 1;
3971 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3972 /* If this is a scalar in a register that is stored in a wider mode
3973 than the declared mode, compute the result into its declared mode
3974 and then convert to the wider mode. Our value is the computed
3975 expression. */
3977 /* If we don't want a value, we can do the conversion inside EXP,
3978 which will often result in some optimizations. Do the conversion
3979 in two steps: first change the signedness, if needed, then
3980 the extend. But don't do this if the type of EXP is a subtype
3981 of something else since then the conversion might involve
3982 more than just converting modes. */
3983 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3984 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3986 if (TREE_UNSIGNED (TREE_TYPE (exp))
3987 != SUBREG_PROMOTED_UNSIGNED_P (target))
3988 exp
3989 = convert
3990 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3991 TREE_TYPE (exp)),
3992 exp);
3994 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3995 SUBREG_PROMOTED_UNSIGNED_P (target)),
3996 exp);
3999 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4001 /* If TEMP is a volatile MEM and we want a result value, make
4002 the access now so it gets done only once. Likewise if
4003 it contains TARGET. */
4004 if (GET_CODE (temp) == MEM && want_value
4005 && (MEM_VOLATILE_P (temp)
4006 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4007 temp = copy_to_reg (temp);
4009 /* If TEMP is a VOIDmode constant, use convert_modes to make
4010 sure that we properly convert it. */
4011 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4012 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4013 TYPE_MODE (TREE_TYPE (exp)), temp,
4014 SUBREG_PROMOTED_UNSIGNED_P (target));
4016 convert_move (SUBREG_REG (target), temp,
4017 SUBREG_PROMOTED_UNSIGNED_P (target));
4019 /* If we promoted a constant, change the mode back down to match
4020 target. Otherwise, the caller might get confused by a result whose
4021 mode is larger than expected. */
4023 if (want_value && GET_MODE (temp) != GET_MODE (target)
4024 && GET_MODE (temp) != VOIDmode)
4026 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4027 SUBREG_PROMOTED_VAR_P (temp) = 1;
4028 SUBREG_PROMOTED_UNSIGNED_P (temp)
4029 = SUBREG_PROMOTED_UNSIGNED_P (target);
4032 return want_value ? temp : NULL_RTX;
4034 else
4036 temp = expand_expr (exp, target, GET_MODE (target), 0);
4037 /* Return TARGET if it's a specified hardware register.
4038 If TARGET is a volatile mem ref, either return TARGET
4039 or return a reg copied *from* TARGET; ANSI requires this.
4041 Otherwise, if TEMP is not TARGET, return TEMP
4042 if it is constant (for efficiency),
4043 or if we really want the correct value. */
4044 if (!(target && GET_CODE (target) == REG
4045 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4046 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4047 && ! rtx_equal_p (temp, target)
4048 && (CONSTANT_P (temp) || want_value))
4049 dont_return_target = 1;
4052 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4053 the same as that of TARGET, adjust the constant. This is needed, for
4054 example, in case it is a CONST_DOUBLE and we want only a word-sized
4055 value. */
4056 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4057 && TREE_CODE (exp) != ERROR_MARK
4058 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4059 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4060 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4062 if (current_function_check_memory_usage
4063 && GET_CODE (target) == MEM
4064 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4066 in_check_memory_usage = 1;
4067 if (GET_CODE (temp) == MEM)
4068 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4069 VOIDmode, 3, XEXP (target, 0), Pmode,
4070 XEXP (temp, 0), Pmode,
4071 expr_size (exp), TYPE_MODE (sizetype));
4072 else
4073 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4074 VOIDmode, 3, XEXP (target, 0), Pmode,
4075 expr_size (exp), TYPE_MODE (sizetype),
4076 GEN_INT (MEMORY_USE_WO),
4077 TYPE_MODE (integer_type_node));
4078 in_check_memory_usage = 0;
4081 /* If value was not generated in the target, store it there.
4082 Convert the value to TARGET's type first if necessary. */
4083 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4084 one or both of them are volatile memory refs, we have to distinguish
4085 two cases:
4086 - expand_expr has used TARGET. In this case, we must not generate
4087 another copy. This can be detected by TARGET being equal according
4088 to == .
4089 - expand_expr has not used TARGET - that means that the source just
4090 happens to have the same RTX form. Since temp will have been created
4091 by expand_expr, it will compare unequal according to == .
4092 We must generate a copy in this case, to reach the correct number
4093 of volatile memory references. */
4095 if ((! rtx_equal_p (temp, target)
4096 || (temp != target && (side_effects_p (temp)
4097 || side_effects_p (target))))
4098 && TREE_CODE (exp) != ERROR_MARK)
4100 target = protect_from_queue (target, 1);
4101 if (GET_MODE (temp) != GET_MODE (target)
4102 && GET_MODE (temp) != VOIDmode)
4104 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4105 if (dont_return_target)
4107 /* In this case, we will return TEMP,
4108 so make sure it has the proper mode.
4109 But don't forget to store the value into TARGET. */
4110 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4111 emit_move_insn (target, temp);
4113 else
4114 convert_move (target, temp, unsignedp);
4117 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4119 /* Handle copying a string constant into an array.
4120 The string constant may be shorter than the array.
4121 So copy just the string's actual length, and clear the rest. */
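/* For instance (illustrative): for

	char buf[16] = "hi";

   the string constant has TREE_STRING_LENGTH == 3 (the two characters
   plus the terminating NUL), so 3 bytes are block-copied and the
   remaining 13 bytes of BUF are cleared.  */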
4122 rtx size;
4123 rtx addr;
4125 /* Get the size of the data type of the string,
4126 which is actually the size of the target. */
4127 size = expr_size (exp);
4128 if (GET_CODE (size) == CONST_INT
4129 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4130 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4131 else
4133 /* Compute the size of the data to copy from the string. */
4134 tree copy_size
4135 = size_binop (MIN_EXPR,
4136 make_tree (sizetype, size),
4137 size_int (TREE_STRING_LENGTH (exp)));
4138 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4139 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4140 VOIDmode, 0);
4141 rtx label = 0;
4143 /* Copy that much. */
4144 emit_block_move (target, temp, copy_size_rtx,
4145 TYPE_ALIGN (TREE_TYPE (exp)));
4147 /* Figure out how much is left in TARGET that we have to clear.
4148 Do all calculations in ptr_mode. */
4150 addr = XEXP (target, 0);
4151 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4153 if (GET_CODE (copy_size_rtx) == CONST_INT)
4155 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4156 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4157 align = MIN (align,
4158 (unsigned int) (BITS_PER_UNIT
4159 * (INTVAL (copy_size_rtx)
4160 & - INTVAL (copy_size_rtx))));
4162 else
4164 addr = force_reg (ptr_mode, addr);
4165 addr = expand_binop (ptr_mode, add_optab, addr,
4166 copy_size_rtx, NULL_RTX, 0,
4167 OPTAB_LIB_WIDEN);
4169 size = expand_binop (ptr_mode, sub_optab, size,
4170 copy_size_rtx, NULL_RTX, 0,
4171 OPTAB_LIB_WIDEN);
4173 align = BITS_PER_UNIT;
4174 label = gen_label_rtx ();
4175 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4176 GET_MODE (size), 0, 0, label);
4178 align = MIN (align, expr_align (copy_size));
4180 if (size != const0_rtx)
4182 rtx dest = gen_rtx_MEM (BLKmode, addr);
4184 MEM_COPY_ATTRIBUTES (dest, target);
4186 /* Be sure we can write on ADDR. */
4187 in_check_memory_usage = 1;
4188 if (current_function_check_memory_usage)
4189 emit_library_call (chkr_check_addr_libfunc,
4190 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4191 addr, Pmode,
4192 size, TYPE_MODE (sizetype),
4193 GEN_INT (MEMORY_USE_WO),
4194 TYPE_MODE (integer_type_node));
4195 in_check_memory_usage = 0;
4196 clear_storage (dest, size, align);
4199 if (label)
4200 emit_label (label);
4203 /* Handle calls that return values in multiple non-contiguous locations.
4204 The Irix 6 ABI has examples of this. */
4205 else if (GET_CODE (target) == PARALLEL)
4206 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4207 TYPE_ALIGN (TREE_TYPE (exp)));
4208 else if (GET_MODE (temp) == BLKmode)
4209 emit_block_move (target, temp, expr_size (exp),
4210 TYPE_ALIGN (TREE_TYPE (exp)));
4211 else
4212 emit_move_insn (target, temp);
4215 /* If we don't want a value, return NULL_RTX. */
4216 if (! want_value)
4217 return NULL_RTX;
4219 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4220 ??? The latter test doesn't seem to make sense. */
4221 else if (dont_return_target && GET_CODE (temp) != MEM)
4222 return temp;
4224 /* Return TARGET itself if it is a hard register. */
4225 else if (want_value && GET_MODE (target) != BLKmode
4226 && ! (GET_CODE (target) == REG
4227 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4228 return copy_to_reg (target);
4230 else
4231 return target;
4234 /* Return 1 if EXP just contains zeros. */
4236 static int
4237 is_zeros_p (exp)
4238 tree exp;
4240 tree elt;
4242 switch (TREE_CODE (exp))
4244 case CONVERT_EXPR:
4245 case NOP_EXPR:
4246 case NON_LVALUE_EXPR:
4247 return is_zeros_p (TREE_OPERAND (exp, 0));
4249 case INTEGER_CST:
4250 return integer_zerop (exp);
4252 case COMPLEX_CST:
4253 return
4254 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4256 case REAL_CST:
4257 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4259 case CONSTRUCTOR:
4260 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4261 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4262 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4263 if (! is_zeros_p (TREE_VALUE (elt)))
4264 return 0;
4266 return 1;
4268 default:
4269 return 0;
4273 /* Return 1 if EXP contains mostly (3/4) zeros. */
4275 static int
4276 mostly_zeros_p (exp)
4277 tree exp;
4279 if (TREE_CODE (exp) == CONSTRUCTOR)
4281 int elts = 0, zeros = 0;
4282 tree elt = CONSTRUCTOR_ELTS (exp);
4283 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4285 /* If there are no ranges of true bits, it is all zero. */
4286 return elt == NULL_TREE;
4288 for (; elt; elt = TREE_CHAIN (elt))
4290 /* We do not handle the case where the index is a RANGE_EXPR,
4291 so the statistic will be somewhat inaccurate.
4292 We do make a more accurate count in store_constructor itself,
4293 and since this function is only used for nested array elements,
4294 this should be close enough. */
4295 if (mostly_zeros_p (TREE_VALUE (elt)))
4296 zeros++;
4297 elts++;
4300 return 4 * zeros >= 3 * elts;
4303 return is_zeros_p (exp);
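/* Worked example (illustrative only): for an initializer such as

	int v[8] = { 0, 0, 0, 0, 0, 0, 7, 7 };

   the CONSTRUCTOR has elts == 8 and zeros == 6, so 4 * 6 >= 3 * 8
   holds and mostly_zeros_p returns 1; store_constructor will likewise
   choose to clear the whole array and store only the two 7s.  */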
4306 /* Helper function for store_constructor.
4307 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4308 TYPE is the type of the CONSTRUCTOR, not the element type.
4309 ALIGN and CLEARED are as for store_constructor.
4310 ALIAS_SET is the alias set to use for any stores.
4312 This provides a recursive shortcut back to store_constructor when it isn't
4313 necessary to go through store_field. This is so that we can pass through
4314 the cleared field to let store_constructor know that we may not have to
4315 clear a substructure if the outer structure has already been cleared. */
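/* Hypothetical illustration of the CLEARED pass-through:

	struct inner { int a, b; };
	struct outer { struct inner i; int c; } x = { { 0, 7 } };

   The outer constructor has fewer elements than OUTER has fields, so
   all of X is cleared first; the recursive call for the nested
   CONSTRUCTOR then sees CLEARED != 0 and stores only the 7, skipping
   the zero field X.I.A.  */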
4317 static void
4318 store_constructor_field (target, bitsize, bitpos,
4319 mode, exp, type, align, cleared, alias_set)
4320 rtx target;
4321 unsigned HOST_WIDE_INT bitsize;
4322 HOST_WIDE_INT bitpos;
4323 enum machine_mode mode;
4324 tree exp, type;
4325 unsigned int align;
4326 int cleared;
4327 int alias_set;
4329 if (TREE_CODE (exp) == CONSTRUCTOR
4330 && bitpos % BITS_PER_UNIT == 0
4331 /* If we have a non-zero bitpos for a register target, then we just
4332 let store_field do the bitfield handling. This is unlikely to
4333 generate unnecessary clear instructions anyway. */
4334 && (bitpos == 0 || GET_CODE (target) == MEM))
4336 if (bitpos != 0)
4337 target
4338 = change_address (target,
4339 GET_MODE (target) == BLKmode
4340 || 0 != (bitpos
4341 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4342 ? BLKmode : VOIDmode,
4343 plus_constant (XEXP (target, 0),
4344 bitpos / BITS_PER_UNIT));
4347 /* Show the alignment may no longer be what it was and update the alias
4348 set, if required. */
4349 if (bitpos != 0)
4350 align = MIN (align, (unsigned int) bitpos & - bitpos);
4351 if (GET_CODE (target) == MEM)
4352 MEM_ALIAS_SET (target) = alias_set;
4354 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4356 else
4357 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4358 int_size_in_bytes (type), alias_set);
4361 /* Store the value of constructor EXP into the rtx TARGET.
4362 TARGET is either a REG or a MEM.
4363 ALIGN is the maximum known alignment for TARGET.
4364 CLEARED is true if TARGET is known to have been zero'd.
4365 SIZE is the number of bytes of TARGET we are allowed to modify: this
4366 may not be the same as the size of EXP if we are assigning to a field
4367 which has been packed to exclude padding bits. */
4369 static void
4370 store_constructor (exp, target, align, cleared, size)
4371 tree exp;
4372 rtx target;
4373 unsigned int align;
4374 int cleared;
4375 HOST_WIDE_INT size;
4377 tree type = TREE_TYPE (exp);
4378 #ifdef WORD_REGISTER_OPERATIONS
4379 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4380 #endif
4382 /* We know our target cannot conflict, since safe_from_p has been called. */
4383 #if 0
4384 /* Don't try copying piece by piece into a hard register
4385 since that is vulnerable to being clobbered by EXP.
4386 Instead, construct in a pseudo register and then copy it all. */
4387 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4389 rtx temp = gen_reg_rtx (GET_MODE (target));
4390 store_constructor (exp, temp, align, cleared, size);
4391 emit_move_insn (target, temp);
4392 return;
4394 #endif
4396 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4397 || TREE_CODE (type) == QUAL_UNION_TYPE)
4399 register tree elt;
4401 /* Inform later passes that the whole union value is dead. */
4402 if ((TREE_CODE (type) == UNION_TYPE
4403 || TREE_CODE (type) == QUAL_UNION_TYPE)
4404 && ! cleared)
4406 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4408 /* If the constructor is empty, clear the union. */
4409 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4410 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4413 /* If we are building a static constructor into a register,
4414 set the initial value as zero so we can fold the value into
4415 a constant. But if more than one register is involved,
4416 this probably loses. */
4417 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4418 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4420 if (! cleared)
4421 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4423 cleared = 1;
4426 /* If the constructor has fewer fields than the structure
4427 or if we are initializing the structure to mostly zeros,
4428 clear the whole structure first. Don't do this if TARGET is a
4429 register whose mode size isn't equal to SIZE, since clear_storage
4430 can't handle this case. */
4431 else if (size > 0
4432 && ((list_length (CONSTRUCTOR_ELTS (exp))
4433 != fields_length (type))
4434 || mostly_zeros_p (exp))
4435 && (GET_CODE (target) != REG
4436 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4438 if (! cleared)
4439 clear_storage (target, GEN_INT (size), align);
4441 cleared = 1;
4443 else if (! cleared)
4444 /* Inform later passes that the old value is dead. */
4445 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4447 /* Store each element of the constructor into
4448 the corresponding field of TARGET. */
4450 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4452 register tree field = TREE_PURPOSE (elt);
4453 #ifdef WORD_REGISTER_OPERATIONS
4454 tree value = TREE_VALUE (elt);
4455 #endif
4456 register enum machine_mode mode;
4457 HOST_WIDE_INT bitsize;
4458 HOST_WIDE_INT bitpos = 0;
4459 int unsignedp;
4460 tree offset;
4461 rtx to_rtx = target;
4463 /* Just ignore missing fields.
4464 We cleared the whole structure, above,
4465 if any fields are missing. */
4466 if (field == 0)
4467 continue;
4469 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4470 continue;
4472 if (host_integerp (DECL_SIZE (field), 1))
4473 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4474 else
4475 bitsize = -1;
4477 unsignedp = TREE_UNSIGNED (field);
4478 mode = DECL_MODE (field);
4479 if (DECL_BIT_FIELD (field))
4480 mode = VOIDmode;
4482 offset = DECL_FIELD_OFFSET (field);
4483 if (host_integerp (offset, 0)
4484 && host_integerp (bit_position (field), 0))
4486 bitpos = int_bit_position (field);
4487 offset = 0;
4489 else
4490 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4492 if (offset)
4494 rtx offset_rtx;
4496 if (contains_placeholder_p (offset))
4497 offset = build (WITH_RECORD_EXPR, sizetype,
4498 offset, make_tree (TREE_TYPE (exp), target));
4500 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4501 if (GET_CODE (to_rtx) != MEM)
4502 abort ();
4504 if (GET_MODE (offset_rtx) != ptr_mode)
4506 #ifdef POINTERS_EXTEND_UNSIGNED
4507 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4508 #else
4509 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4510 #endif
4513 to_rtx
4514 = change_address (to_rtx, VOIDmode,
4515 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4516 force_reg (ptr_mode,
4517 offset_rtx)));
4518 align = DECL_OFFSET_ALIGN (field);
4521 if (TREE_READONLY (field))
4523 if (GET_CODE (to_rtx) == MEM)
4524 to_rtx = copy_rtx (to_rtx);
4526 RTX_UNCHANGING_P (to_rtx) = 1;
4529 #ifdef WORD_REGISTER_OPERATIONS
4530 /* If this initializes a field that is smaller than a word, at the
4531 start of a word, try to widen it to a full word.
4532 This special case allows us to output C++ member function
4533 initializations in a form that the optimizers can understand. */
4534 if (GET_CODE (target) == REG
4535 && bitsize < BITS_PER_WORD
4536 && bitpos % BITS_PER_WORD == 0
4537 && GET_MODE_CLASS (mode) == MODE_INT
4538 && TREE_CODE (value) == INTEGER_CST
4539 && exp_size >= 0
4540 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4542 tree type = TREE_TYPE (value);
4543 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4545 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4546 value = convert (type, value);
4548 if (BYTES_BIG_ENDIAN)
4549 value
4550 = fold (build (LSHIFT_EXPR, type, value,
4551 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4552 bitsize = BITS_PER_WORD;
4553 mode = word_mode;
4555 #endif
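/* Worked example (assuming 32-bit words and BYTES_BIG_ENDIAN): storing
   the INTEGER_CST 5 into an 8-bit integer field at bitpos 0 of a REG
   target is widened into a full-word store of 5 << 24, so later passes
   see an ordinary word move instead of a bit-field insertion.  */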
4556 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4557 TREE_VALUE (elt), type, align, cleared,
4558 (DECL_NONADDRESSABLE_P (field)
4559 && GET_CODE (to_rtx) == MEM)
4560 ? MEM_ALIAS_SET (to_rtx)
4561 : get_alias_set (TREE_TYPE (field)));
4564 else if (TREE_CODE (type) == ARRAY_TYPE)
4566 register tree elt;
4567 register int i;
4568 int need_to_clear;
4569 tree domain = TYPE_DOMAIN (type);
4570 tree elttype = TREE_TYPE (type);
4571 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4572 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4573 HOST_WIDE_INT minelt;
4574 HOST_WIDE_INT maxelt;
4576 /* If we have constant bounds for the range of the type, get them. */
4577 if (const_bounds_p)
4579 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4580 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4583 /* If the constructor has fewer elements than the array,
4584 clear the whole array first. Similarly if this is
4585 a static constructor of a non-BLKmode object.
4586 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4587 need_to_clear = 1;
4588 else
4590 HOST_WIDE_INT count = 0, zero_count = 0;
4591 need_to_clear = ! const_bounds_p;
4593 /* This loop is a more accurate version of the loop in
4594 mostly_zeros_p (it handles RANGE_EXPR in an index).
4595 It is also needed to check for missing elements. */
4596 for (elt = CONSTRUCTOR_ELTS (exp);
4597 elt != NULL_TREE && ! need_to_clear;
4598 elt = TREE_CHAIN (elt))
4600 tree index = TREE_PURPOSE (elt);
4601 HOST_WIDE_INT this_node_count;
4603 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4605 tree lo_index = TREE_OPERAND (index, 0);
4606 tree hi_index = TREE_OPERAND (index, 1);
4608 if (! host_integerp (lo_index, 1)
4609 || ! host_integerp (hi_index, 1))
4611 need_to_clear = 1;
4612 break;
4615 this_node_count = (tree_low_cst (hi_index, 1)
4616 - tree_low_cst (lo_index, 1) + 1);
4618 else
4619 this_node_count = 1;
4621 count += this_node_count;
4622 if (mostly_zeros_p (TREE_VALUE (elt)))
4623 zero_count += this_node_count;
4626 /* Clear the entire array first if there are any missing elements,
4627 or if the incidence of zero elements is >= 75%. */
4628 if (! need_to_clear
4629 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4630 need_to_clear = 1;
4633 if (need_to_clear && size > 0)
4635 if (! cleared)
4636 clear_storage (target, GEN_INT (size), align);
4637 cleared = 1;
4639 else
4640 /* Inform later passes that the old value is dead. */
4641 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4643 /* Store each element of the constructor into
4644 the corresponding element of TARGET, determined
4645 by counting the elements. */
4646 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4647 elt;
4648 elt = TREE_CHAIN (elt), i++)
4650 register enum machine_mode mode;
4651 HOST_WIDE_INT bitsize;
4652 HOST_WIDE_INT bitpos;
4653 int unsignedp;
4654 tree value = TREE_VALUE (elt);
4655 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4656 tree index = TREE_PURPOSE (elt);
4657 rtx xtarget = target;
4659 if (cleared && is_zeros_p (value))
4660 continue;
4662 unsignedp = TREE_UNSIGNED (elttype);
4663 mode = TYPE_MODE (elttype);
4664 if (mode == BLKmode)
4665 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4666 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4667 : -1);
4668 else
4669 bitsize = GET_MODE_BITSIZE (mode);
4671 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4673 tree lo_index = TREE_OPERAND (index, 0);
4674 tree hi_index = TREE_OPERAND (index, 1);
4675 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4676 struct nesting *loop;
4677 HOST_WIDE_INT lo, hi, count;
4678 tree position;
4680 /* If the range is constant and "small", unroll the loop. */
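/* E.g. (illustrative, using the GNU designated-range extension and
   assuming 32-bit int): for

	int a[100] = { [2 ... 5] = 7 };

   the range has COUNT == 4 and 4 * 32 bits <= 40 * 8, so the four
   element stores are emitted directly instead of building the runtime
   loop in the branch below.  */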
4681 if (const_bounds_p
4682 && host_integerp (lo_index, 0)
4683 && host_integerp (hi_index, 0)
4684 && (lo = tree_low_cst (lo_index, 0),
4685 hi = tree_low_cst (hi_index, 0),
4686 count = hi - lo + 1,
4687 (GET_CODE (target) != MEM
4688 || count <= 2
4689 || (host_integerp (TYPE_SIZE (elttype), 1)
4690 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4691 <= 40 * 8)))))
4693 lo -= minelt; hi -= minelt;
4694 for (; lo <= hi; lo++)
4696 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4697 store_constructor_field
4698 (target, bitsize, bitpos, mode, value, type, align,
4699 cleared,
4700 TYPE_NONALIASED_COMPONENT (type)
4701 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4704 else
4706 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4707 loop_top = gen_label_rtx ();
4708 loop_end = gen_label_rtx ();
4710 unsignedp = TREE_UNSIGNED (domain);
4712 index = build_decl (VAR_DECL, NULL_TREE, domain);
4714 DECL_RTL (index) = index_r
4715 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4716 &unsignedp, 0));
4718 if (TREE_CODE (value) == SAVE_EXPR
4719 && SAVE_EXPR_RTL (value) == 0)
4721 /* Make sure value gets expanded once before the
4722 loop. */
4723 expand_expr (value, const0_rtx, VOIDmode, 0);
4724 emit_queue ();
4726 store_expr (lo_index, index_r, 0);
4727 loop = expand_start_loop (0);
4729 /* Assign value to element index. */
4730 position
4731 = convert (ssizetype,
4732 fold (build (MINUS_EXPR, TREE_TYPE (index),
4733 index, TYPE_MIN_VALUE (domain))));
4734 position = size_binop (MULT_EXPR, position,
4735 convert (ssizetype,
4736 TYPE_SIZE_UNIT (elttype)));
4738 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4739 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4740 xtarget = change_address (target, mode, addr);
4741 if (TREE_CODE (value) == CONSTRUCTOR)
4742 store_constructor (value, xtarget, align, cleared,
4743 bitsize / BITS_PER_UNIT);
4744 else
4745 store_expr (value, xtarget, 0);
4747 expand_exit_loop_if_false (loop,
4748 build (LT_EXPR, integer_type_node,
4749 index, hi_index));
4751 expand_increment (build (PREINCREMENT_EXPR,
4752 TREE_TYPE (index),
4753 index, integer_one_node), 0, 0);
4754 expand_end_loop ();
4755 emit_label (loop_end);
4758 else if ((index != 0 && ! host_integerp (index, 0))
4759 || ! host_integerp (TYPE_SIZE (elttype), 1))
4761 rtx pos_rtx, addr;
4762 tree position;
4764 if (index == 0)
4765 index = ssize_int (i);
4767 if (minelt)
4768 index = convert (ssizetype,
4769 fold (build (MINUS_EXPR, index,
4770 TYPE_MIN_VALUE (domain))));
4772 position = size_binop (MULT_EXPR, index,
4773 convert (ssizetype,
4774 TYPE_SIZE_UNIT (elttype)));
4775 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4776 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4777 xtarget = change_address (target, mode, addr);
4778 store_expr (value, xtarget, 0);
4780 else
4782 if (index != 0)
4783 bitpos = ((tree_low_cst (index, 0) - minelt)
4784 * tree_low_cst (TYPE_SIZE (elttype), 1));
4785 else
4786 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4788 store_constructor_field (target, bitsize, bitpos, mode, value,
4789 type, align, cleared,
4790 TYPE_NONALIASED_COMPONENT (type)
4791 && GET_CODE (target) == MEM
4792 ? MEM_ALIAS_SET (target) :
4793 get_alias_set (elttype));
4799 /* Set constructor assignments. */
4800 else if (TREE_CODE (type) == SET_TYPE)
4802 tree elt = CONSTRUCTOR_ELTS (exp);
4803 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4804 tree domain = TYPE_DOMAIN (type);
4805 tree domain_min, domain_max, bitlength;
4807 /* The default implementation strategy is to extract the constant
4808 parts of the constructor, use that to initialize the target,
4809 and then "or" in whatever non-constant ranges we need in addition.
4811 If a large set is all zero or all ones, it is
4812 probably better to set it using memset (if available) or bzero.
4813 Also, if a large set has just a single range, it may also be
4814 better to first clear the whole set (using
4815 bzero/memset), and then set the bits we want.
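/* For example (illustrative; SET_TYPE comes from front ends such as
   Chill or Pascal): a mostly-constant constructor like [1, 3, 5..8]
   has its constant bits packed into words and stored directly below,
   while a non-constant range, if any, is OR'd in afterwards through
   the __setbits library routine.  */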
4817 /* Check for all zeros. */
4818 if (elt == NULL_TREE && size > 0)
4820 if (!cleared)
4821 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4822 return;
4825 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4826 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4827 bitlength = size_binop (PLUS_EXPR,
4828 size_diffop (domain_max, domain_min),
4829 ssize_int (1));
4831 nbits = tree_low_cst (bitlength, 1);
4833 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4834 are "complicated" (more than one range), initialize (the
4835 constant parts) by copying from a constant. */
4836 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4837 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4839 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4840 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4841 char *bit_buffer = (char *) alloca (nbits);
4842 HOST_WIDE_INT word = 0;
4843 unsigned int bit_pos = 0;
4844 unsigned int ibit = 0;
4845 unsigned int offset = 0; /* In bytes from beginning of set. */
4847 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4848 for (;;)
4850 if (bit_buffer[ibit])
4852 if (BYTES_BIG_ENDIAN)
4853 word |= (1 << (set_word_size - 1 - bit_pos));
4854 else
4855 word |= 1 << bit_pos;
4858 bit_pos++; ibit++;
4859 if (bit_pos >= set_word_size || ibit == nbits)
4861 if (word != 0 || ! cleared)
4863 rtx datum = GEN_INT (word);
4864 rtx to_rtx;
4866 /* The assumption here is that it is safe to use
4867 XEXP if the set is multi-word, but not if
4868 it's single-word. */
4869 if (GET_CODE (target) == MEM)
4871 to_rtx = plus_constant (XEXP (target, 0), offset);
4872 to_rtx = change_address (target, mode, to_rtx);
4874 else if (offset == 0)
4875 to_rtx = target;
4876 else
4877 abort ();
4878 emit_move_insn (to_rtx, datum);
4881 if (ibit == nbits)
4882 break;
4883 word = 0;
4884 bit_pos = 0;
4885 offset += set_word_size / BITS_PER_UNIT;
4889 else if (!cleared)
4890 /* Don't bother clearing storage if the set is all ones. */
4891 if (TREE_CHAIN (elt) != NULL_TREE
4892 || (TREE_PURPOSE (elt) == NULL_TREE
4893 ? nbits != 1
4894 : ( ! host_integerp (TREE_VALUE (elt), 0)
4895 || ! host_integerp (TREE_PURPOSE (elt), 0)
4896 || (tree_low_cst (TREE_VALUE (elt), 0)
4897 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4898 != (HOST_WIDE_INT) nbits))))
4899 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4901 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4903 /* Start of range of element or NULL. */
4904 tree startbit = TREE_PURPOSE (elt);
4905 /* End of range of element, or element value. */
4906 tree endbit = TREE_VALUE (elt);
4907 #ifdef TARGET_MEM_FUNCTIONS
4908 HOST_WIDE_INT startb, endb;
4909 #endif
4910 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4912 bitlength_rtx = expand_expr (bitlength,
4913 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4915 /* Handle non-range tuple element like [ expr ]. */
4916 if (startbit == NULL_TREE)
4918 startbit = save_expr (endbit);
4919 endbit = startbit;
4922 startbit = convert (sizetype, startbit);
4923 endbit = convert (sizetype, endbit);
4924 if (! integer_zerop (domain_min))
4926 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4927 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4929 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4930 EXPAND_CONST_ADDRESS);
4931 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4932 EXPAND_CONST_ADDRESS);
4934 if (REG_P (target))
4936 targetx
4937 = assign_temp
4938 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4939 TYPE_QUAL_CONST)),
4940 0, 1, 1);
4941 emit_move_insn (targetx, target);
4944 else if (GET_CODE (target) == MEM)
4945 targetx = target;
4946 else
4947 abort ();
4949 #ifdef TARGET_MEM_FUNCTIONS
4950 /* Optimization: If startbit and endbit are
4951 constants divisible by BITS_PER_UNIT,
4952 call memset instead. */
4953 if (TREE_CODE (startbit) == INTEGER_CST
4954 && TREE_CODE (endbit) == INTEGER_CST
4955 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4956 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4958 emit_library_call (memset_libfunc, LCT_NORMAL,
4959 VOIDmode, 3,
4960 plus_constant (XEXP (targetx, 0),
4961 startb / BITS_PER_UNIT),
4962 Pmode,
4963 constm1_rtx, TYPE_MODE (integer_type_node),
4964 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4965 TYPE_MODE (sizetype));
4967 else
4968 #endif
4969 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4970 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4971 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4972 startbit_rtx, TYPE_MODE (sizetype),
4973 endbit_rtx, TYPE_MODE (sizetype));
4975 if (REG_P (target))
4976 emit_move_insn (target, targetx);
4980 else
4981 abort ();
4984 /* Store the value of EXP (an expression tree)
4985 into a subfield of TARGET which has mode MODE and occupies
4986 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4987 If MODE is VOIDmode, it means that we are storing into a bit-field.
4989 If VALUE_MODE is VOIDmode, return nothing in particular.
4990 UNSIGNEDP is not used in this case.
4992 Otherwise, return an rtx for the value stored. This rtx
4993 has mode VALUE_MODE if that is convenient to do.
4994 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4996 ALIGN is the alignment that TARGET is known to have.
4997 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4999 ALIAS_SET is the alias set for the destination. This value will
5000 (in general) be different from that for TARGET, since TARGET is a
5001 reference to the containing structure. */
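/* A hedged call sketch: storing EXP into a 3-bit field that begins at
   bit 13 of a structure in memory would look roughly like

	store_field (to_rtx, 3, 13, VOIDmode, exp, VOIDmode, 0,
		     align, total_size, alias_set);

   where MODE == VOIDmode marks a bit-field store, so the value goes
   through store_bit_field rather than an ordinary memory reference,
   and the VOIDmode VALUE_MODE says no result rtx is wanted.  */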
5003 static rtx
5004 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5005 unsignedp, align, total_size, alias_set)
5006 rtx target;
5007 HOST_WIDE_INT bitsize;
5008 HOST_WIDE_INT bitpos;
5009 enum machine_mode mode;
5010 tree exp;
5011 enum machine_mode value_mode;
5012 int unsignedp;
5013 unsigned int align;
5014 HOST_WIDE_INT total_size;
5015 int alias_set;
5017 HOST_WIDE_INT width_mask = 0;
5019 if (TREE_CODE (exp) == ERROR_MARK)
5020 return const0_rtx;
5022 if (bitsize < HOST_BITS_PER_WIDE_INT)
5023 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5025 /* If we are storing into an unaligned field of an aligned union that is
5026 in a register, we may have the mode of TARGET being an integer mode but
5027 MODE == BLKmode. In that case, get an aligned object whose size and
5028 alignment are the same as TARGET and store TARGET into it (we can avoid
5029 the store if the field being stored is the entire width of TARGET). Then
5030 call ourselves recursively to store the field into a BLKmode version of
5031 that object. Finally, load from the object into TARGET. This is not
5032 very efficient in general, but should only be slightly more expensive
5033 than the otherwise-required unaligned accesses. Perhaps this can be
5034 cleaned up later. */
5036 if (mode == BLKmode
5037 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5039 rtx object
5040 = assign_temp
5041 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5042 TYPE_QUAL_CONST),
5043 0, 1, 1);
5044 rtx blk_object = copy_rtx (object);
5046 PUT_MODE (blk_object, BLKmode);
5048 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5049 emit_move_insn (object, target);
5051 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5052 align, total_size, alias_set);
5054 /* Even though we aren't returning target, we need to
5055 give it the updated value. */
5056 emit_move_insn (target, object);
5058 return blk_object;
5061 if (GET_CODE (target) == CONCAT)
5063 /* We're storing into a struct containing a single __complex. */
5065 if (bitpos != 0)
5066 abort ();
5067 return store_expr (exp, target, 0);
5070 /* If the structure is in a register or if the component
5071 is a bit field, we cannot use addressing to access it.
5072 Use bit-field techniques or SUBREG to store in it. */
5074 if (mode == VOIDmode
5075 || (mode != BLKmode && ! direct_store[(int) mode]
5076 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5077 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5078 || GET_CODE (target) == REG
5079 || GET_CODE (target) == SUBREG
5080 /* If the field isn't aligned enough to store as an ordinary memref,
5081 store it as a bit field. */
5082 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5083 && (align < GET_MODE_ALIGNMENT (mode)
5084 || bitpos % GET_MODE_ALIGNMENT (mode)))
5085 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5086 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5087 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5088 /* If the RHS and field are a constant size and the size of the
5089 RHS isn't the same size as the bitfield, we must use bitfield
5090 operations. */
5091 || (bitsize >= 0
5092 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5093 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5095 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5097 /* If BITSIZE is narrower than the size of the type of EXP
5098 we will be narrowing TEMP. Normally, what's wanted are the
5099 low-order bits. However, if EXP's type is a record and this is
5100 a big-endian machine, we want the upper BITSIZE bits.
5101 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5102 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5103 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5104 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5105 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5106 - bitsize),
5107 temp, 1);
5109 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5110 MODE. */
5111 if (mode != VOIDmode && mode != BLKmode
5112 && mode != TYPE_MODE (TREE_TYPE (exp)))
5113 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5115 /* If the modes of TARGET and TEMP are both BLKmode, both
5116 must be in memory and BITPOS must be aligned on a byte
5117 boundary. If so, we simply do a block copy. */
5118 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5120 unsigned int exp_align = expr_align (exp);
5122 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5123 || bitpos % BITS_PER_UNIT != 0)
5124 abort ();
5126 target = change_address (target, VOIDmode,
5127 plus_constant (XEXP (target, 0),
5128 bitpos / BITS_PER_UNIT));
5130 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5131 align = MIN (exp_align, align);
5133 /* Find an alignment that is consistent with the bit position. */
5134 while ((bitpos % align) != 0)
5135 align >>= 1;
5137 emit_block_move (target, temp,
5138 bitsize == -1 ? expr_size (exp)
5139 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5140 / BITS_PER_UNIT),
5141 align);
5143 return value_mode == VOIDmode ? const0_rtx : target;
5146 /* Store the value in the bitfield. */
5147 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5148 if (value_mode != VOIDmode)
5150 /* The caller wants an rtx for the value. */
5151 /* If possible, avoid refetching from the bitfield itself. */
5152 if (width_mask != 0
5153 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5155 tree count;
5156 enum machine_mode tmode;
5158 if (unsignedp)
5159 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5160 tmode = GET_MODE (temp);
5161 if (tmode == VOIDmode)
5162 tmode = value_mode;
5163 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5164 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5165 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5167 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5168 NULL_RTX, value_mode, 0, align,
5169 total_size);
5171 return const0_rtx;
5173 else
5175 rtx addr = XEXP (target, 0);
5176 rtx to_rtx;
5178 /* If a value is wanted, it must be the lhs;
5179 so make the address stable for multiple use. */
5181 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5182 && ! CONSTANT_ADDRESS_P (addr)
5183 /* A frame-pointer reference is already stable. */
5184 && ! (GET_CODE (addr) == PLUS
5185 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5186 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5187 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5188 addr = copy_to_reg (addr);
5190 /* Now build a reference to just the desired component. */
5192 to_rtx = copy_rtx (change_address (target, mode,
5193 plus_constant (addr,
5194 (bitpos
5195 / BITS_PER_UNIT))));
5196 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5197 MEM_ALIAS_SET (to_rtx) = alias_set;
5199 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5203 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5204 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5205 ARRAY_REFs and find the ultimate containing object, which we return.
5207 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5208 bit position, and *PUNSIGNEDP to the signedness of the field.
5209 If the position of the field is variable, we store a tree
5210 giving the variable offset (in units) in *POFFSET.
5211 This offset is in addition to the bit position.
5212 If the position is not variable, we store 0 in *POFFSET.
5213 We set *PALIGNMENT to the alignment of the address that will be
5214 computed. This is the alignment of the thing we return if *POFFSET
5215 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5217 If any of the extraction expressions is volatile,
5218 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5220 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5221 is a mode that can be used to access the field. In that case, *PBITSIZE
5222 is redundant.
5224 If the field describes a variable-sized object, *PMODE is set to
5225 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5226 this case, but the address of the object can be found. */
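/* Illustrative decomposition (field layout assumed): for a reference
   such as

	s.x[i].f

   where F is a 3-bit bit-field, this returns the VAR_DECL for S with
   *PBITSIZE == 3, *PMODE == VOIDmode (a bit-field), *PBITPOS the
   constant part of the bit offset, and *POFFSET a tree for the
   variable part of the offset in units (it involves I).  */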
5228 tree
5229 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5230 punsignedp, pvolatilep, palignment)
5231 tree exp;
5232 HOST_WIDE_INT *pbitsize;
5233 HOST_WIDE_INT *pbitpos;
5234 tree *poffset;
5235 enum machine_mode *pmode;
5236 int *punsignedp;
5237 int *pvolatilep;
5238 unsigned int *palignment;
5240 tree size_tree = 0;
5241 enum machine_mode mode = VOIDmode;
5242 tree offset = size_zero_node;
5243 tree bit_offset = bitsize_zero_node;
5244 unsigned int alignment = BIGGEST_ALIGNMENT;
5245 tree tem;
5247 /* First get the mode, signedness, and size. We do this from just the
5248 outermost expression. */
5249 if (TREE_CODE (exp) == COMPONENT_REF)
5251 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5252 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5253 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5255 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5257 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5259 size_tree = TREE_OPERAND (exp, 1);
5260 *punsignedp = TREE_UNSIGNED (exp);
5262 else
5264 mode = TYPE_MODE (TREE_TYPE (exp));
5265 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5267 if (mode == BLKmode)
5268 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5269 else
5270 *pbitsize = GET_MODE_BITSIZE (mode);
5273 if (size_tree != 0)
5275 if (! host_integerp (size_tree, 1))
5276 mode = BLKmode, *pbitsize = -1;
5277 else
5278 *pbitsize = tree_low_cst (size_tree, 1);
5281 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5282 and find the ultimate containing object. */
5283 while (1)
5285 if (TREE_CODE (exp) == BIT_FIELD_REF)
5286 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5287 else if (TREE_CODE (exp) == COMPONENT_REF)
5289 tree field = TREE_OPERAND (exp, 1);
5290 tree this_offset = DECL_FIELD_OFFSET (field);
5292 /* If this field hasn't been filled in yet, don't go
5293 past it. This should only happen when folding expressions
5294 made during type construction. */
5295 if (this_offset == 0)
5296 break;
5297 else if (! TREE_CONSTANT (this_offset)
5298 && contains_placeholder_p (this_offset))
5299 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5301 offset = size_binop (PLUS_EXPR, offset, this_offset);
5302 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5303 DECL_FIELD_BIT_OFFSET (field));
5305 if (! host_integerp (offset, 0))
5306 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5309 else if (TREE_CODE (exp) == ARRAY_REF)
5311 tree index = TREE_OPERAND (exp, 1);
5312 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5313 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5314 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5316 /* We assume all arrays have sizes that are a multiple of a byte.
5317 First subtract the lower bound, if any, in the type of the
5318 index, then convert to sizetype and multiply by the size of the
5319 array element. */
5320 if (low_bound != 0 && ! integer_zerop (low_bound))
5321 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5322 index, low_bound));
5324 /* If the index has a self-referential type, pass it to a
5325 WITH_RECORD_EXPR; if the component size has one, pass our
5326 component to one. */
5327 if (! TREE_CONSTANT (index)
5328 && contains_placeholder_p (index))
5329 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5330 if (! TREE_CONSTANT (unit_size)
5331 && contains_placeholder_p (unit_size))
5332 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5333 TREE_OPERAND (exp, 0));
5335 offset = size_binop (PLUS_EXPR, offset,
5336 size_binop (MULT_EXPR,
5337 convert (sizetype, index),
5338 unit_size));
5341 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5342 && ! ((TREE_CODE (exp) == NOP_EXPR
5343 || TREE_CODE (exp) == CONVERT_EXPR)
5344 && (TYPE_MODE (TREE_TYPE (exp))
5345 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5346 break;
5348 /* If any reference in the chain is volatile, the effect is volatile. */
5349 if (TREE_THIS_VOLATILE (exp))
5350 *pvolatilep = 1;
5352 /* If the offset is non-constant already, then we can't assume any
5353 alignment more than the alignment here. */
5354 if (! TREE_CONSTANT (offset))
5355 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5357 exp = TREE_OPERAND (exp, 0);
5360 if (DECL_P (exp))
5361 alignment = MIN (alignment, DECL_ALIGN (exp));
5362 else if (TREE_TYPE (exp) != 0)
5363 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5365 /* If OFFSET is constant, see if we can return the whole thing as a
5366 constant bit position. Otherwise, split it up. */
5367 if (host_integerp (offset, 0)
5368 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5369 bitsize_unit_node))
5370 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5371 && host_integerp (tem, 0))
5372 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5373 else
5374 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5376 *pmode = mode;
5377 *palignment = alignment;
5378 return exp;
5381 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5383 static enum memory_use_mode
5384 get_memory_usage_from_modifier (modifier)
5385 enum expand_modifier modifier;
5387 switch (modifier)
5389 case EXPAND_NORMAL:
5390 case EXPAND_SUM:
5391 return MEMORY_USE_RO;
5392 break;
5393 case EXPAND_MEMORY_USE_WO:
5394 return MEMORY_USE_WO;
5395 break;
5396 case EXPAND_MEMORY_USE_RW:
5397 return MEMORY_USE_RW;
5398 break;
5399 case EXPAND_MEMORY_USE_DONT:
5400 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5401 MEMORY_USE_DONT, because they are modifiers to a call of
5402 expand_expr in the ADDR_EXPR case of expand_expr. */
5403 case EXPAND_CONST_ADDRESS:
5404 case EXPAND_INITIALIZER:
5405 return MEMORY_USE_DONT;
5406 case EXPAND_MEMORY_USE_BAD:
5407 default:
5408 abort ();
5412 /* Given an rtx VALUE that may contain additions and multiplications,
5413 return an equivalent value that just refers to a register or memory.
5414 This is done by generating instructions to perform the arithmetic
5415 and returning a pseudo-register containing the value.
5417 The returned value may be a REG, SUBREG, MEM or constant. */
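/* Hedged usage sketch: for VALUE == (plus:SI (reg:SI 100) (const_int 4))
   this emits an add into a pseudo and returns that pseudo, e.g.

	rtx op = force_operand (value, NULL_RTX);

   so the caller gets something acceptable as a general operand.  */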
5419 rtx
5420 force_operand (value, target)
5421 rtx value, target;
5423 register optab binoptab = 0;
5424 /* Use a temporary to force order of execution of calls to
5425 `force_operand'. */
5426 rtx tmp;
5427 register rtx op2;
5428 /* Use subtarget as the target for operand 0 of a binary operation. */
5429 register rtx subtarget = get_subtarget (target);
5431 /* Check for a PIC address load. */
5432 if (flag_pic
5433 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5434 && XEXP (value, 0) == pic_offset_table_rtx
5435 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5436 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5437 || GET_CODE (XEXP (value, 1)) == CONST))
5439 if (!subtarget)
5440 subtarget = gen_reg_rtx (GET_MODE (value));
5441 emit_move_insn (subtarget, value);
5442 return subtarget;
5445 if (GET_CODE (value) == PLUS)
5446 binoptab = add_optab;
5447 else if (GET_CODE (value) == MINUS)
5448 binoptab = sub_optab;
5449 else if (GET_CODE (value) == MULT)
5451 op2 = XEXP (value, 1);
5452 if (!CONSTANT_P (op2)
5453 && !(GET_CODE (op2) == REG && op2 != subtarget))
5454 subtarget = 0;
5455 tmp = force_operand (XEXP (value, 0), subtarget);
5456 return expand_mult (GET_MODE (value), tmp,
5457 force_operand (op2, NULL_RTX),
5458 target, 1);
5461 if (binoptab)
5463 op2 = XEXP (value, 1);
5464 if (!CONSTANT_P (op2)
5465 && !(GET_CODE (op2) == REG && op2 != subtarget))
5466 subtarget = 0;
5467 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5469 binoptab = add_optab;
5470 op2 = negate_rtx (GET_MODE (value), op2);
5473 /* Check for an addition with OP2 a constant integer and our first
5474 operand a PLUS of a virtual register and something else. In that
5475 case, we want to emit the sum of the virtual register and the
5476 constant first and then add the other value. This allows virtual
5477 register instantiation to simply modify the constant rather than
5478 creating another one around this addition. */
5479 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5480 && GET_CODE (XEXP (value, 0)) == PLUS
5481 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5482 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5483 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5485 rtx temp = expand_binop (GET_MODE (value), binoptab,
5486 XEXP (XEXP (value, 0), 0), op2,
5487 subtarget, 0, OPTAB_LIB_WIDEN);
5488 return expand_binop (GET_MODE (value), binoptab, temp,
5489 force_operand (XEXP (XEXP (value, 0), 1), 0),
5490 target, 0, OPTAB_LIB_WIDEN);
5493 tmp = force_operand (XEXP (value, 0), subtarget);
5494 return expand_binop (GET_MODE (value), binoptab, tmp,
5495 force_operand (op2, NULL_RTX),
5496 target, 0, OPTAB_LIB_WIDEN);
5497 /* We give UNSIGNEDP = 0 to expand_binop
5498 because the only operations we are expanding here are signed ones. */
5500 return value;
5503 /* Subroutine of expand_expr:
5504 save the non-copied parts (LIST) of an expr (LHS), and return a list
5505 which can restore these values to their previous values,
5506 should something modify their storage. */
5508 static tree
5509 save_noncopied_parts (lhs, list)
5510 tree lhs;
5511 tree list;
5513 tree tail;
5514 tree parts = 0;
5516 for (tail = list; tail; tail = TREE_CHAIN (tail))
5517 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5518 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5519 else
5521 tree part = TREE_VALUE (tail);
5522 tree part_type = TREE_TYPE (part);
5523 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5524 rtx target
5525 = assign_temp (build_qualified_type (part_type,
5526 (TYPE_QUALS (part_type)
5527 | TYPE_QUAL_CONST)),
5528 0, 1, 1);
5530 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5531 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5532 parts = tree_cons (to_be_saved,
5533 build (RTL_EXPR, part_type, NULL_TREE,
5534 (tree) target),
5535 parts);
5536 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5538 return parts;
5541 /* Subroutine of expand_expr:
5542 record the non-copied parts (LIST) of an expr (LHS), and return a list
5543 which specifies the initial values of these parts. */
5545 static tree
5546 init_noncopied_parts (lhs, list)
5547 tree lhs;
5548 tree list;
5550 tree tail;
5551 tree parts = 0;
5553 for (tail = list; tail; tail = TREE_CHAIN (tail))
5554 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5555 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5556 else if (TREE_PURPOSE (tail))
5558 tree part = TREE_VALUE (tail);
5559 tree part_type = TREE_TYPE (part);
5560 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5561 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5563 return parts;
5566 /* Subroutine of expand_expr: return nonzero iff there is no way that
5567 EXP can reference X, which is being modified. TOP_P is nonzero if this
5568 call is going to be used to determine whether we need a temporary
5569 for EXP, as opposed to a recursive call to this function.
5571 It is always safe for this routine to return zero since it merely
5572 searches for optimization opportunities. */
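/* Hedged usage sketch: a caller elsewhere in this file that wants to
   reuse an existing rtx as the target for expanding EXP might check

	if (safe_from_p (to_rtx, exp, 1))
	  ... expand EXP directly into TO_RTX ...

   A zero return only forfeits that reuse; it never causes wrong code.  */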
5574 int
5575 safe_from_p (x, exp, top_p)
5576 rtx x;
5577 tree exp;
5578 int top_p;
5580 rtx exp_rtl = 0;
5581 int i, nops;
5582 static tree save_expr_list;
5584 if (x == 0
5585 /* If EXP has varying size, we MUST use a target since we currently
5586 have no way of allocating temporaries of variable size
5587 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5588 So we assume here that something at a higher level has prevented a
5589 clash. This is somewhat bogus, but the best we can do. Only
5590 do this when X is BLKmode and when we are at the top level. */
5591 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5592 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5593 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5594 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5595 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5596 != INTEGER_CST)
5597 && GET_MODE (x) == BLKmode)
5598 /* If X is in the outgoing argument area, it is always safe. */
5599 || (GET_CODE (x) == MEM
5600 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5601 || (GET_CODE (XEXP (x, 0)) == PLUS
5602 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5603 return 1;
5605 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5606 find the underlying pseudo. */
5607 if (GET_CODE (x) == SUBREG)
5609 x = SUBREG_REG (x);
5610 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5611 return 0;
5614 /* A SAVE_EXPR might appear many times in the expression passed to the
5615 top-level safe_from_p call, and if it has a complex subexpression,
5616 examining it multiple times could result in a combinatorial explosion.
5617 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5618 with optimization took about 28 minutes to compile -- even though it was
5619 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5620 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5621 we have processed. Note that the only test of top_p was above. */
5623 if (top_p)
5625 int rtn;
5626 tree t;
5628 save_expr_list = 0;
5630 rtn = safe_from_p (x, exp, 0);
5632 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5633 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5635 return rtn;
5638 /* Now look at our tree code and possibly recurse. */
5639 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5641 case 'd':
5642 exp_rtl = DECL_RTL (exp);
5643 break;
5645 case 'c':
5646 return 1;
5648 case 'x':
5649 if (TREE_CODE (exp) == TREE_LIST)
5650 return ((TREE_VALUE (exp) == 0
5651 || safe_from_p (x, TREE_VALUE (exp), 0))
5652 && (TREE_CHAIN (exp) == 0
5653 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5654 else if (TREE_CODE (exp) == ERROR_MARK)
5655 return 1; /* An already-visited SAVE_EXPR? */
5656 else
5657 return 0;
5659 case '1':
5660 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5662 case '2':
5663 case '<':
5664 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5665 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5667 case 'e':
5668 case 'r':
5669 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5670 the expression. If it is set, we conflict iff we are that rtx or
5671 both are in memory. Otherwise, we check all operands of the
5672 expression recursively. */
5674 switch (TREE_CODE (exp))
5676 case ADDR_EXPR:
5677 return (staticp (TREE_OPERAND (exp, 0))
5678 || TREE_STATIC (exp)
5679 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5681 case INDIRECT_REF:
5682 if (GET_CODE (x) == MEM
5683 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5684 get_alias_set (exp)))
5685 return 0;
5686 break;
5688 case CALL_EXPR:
5689 /* Assume that the call will clobber all hard registers and
5690 all of memory. */
5691 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5692 || GET_CODE (x) == MEM)
5693 return 0;
5694 break;
5696 case RTL_EXPR:
5697 /* If a sequence exists, we would have to scan every instruction
5698 in the sequence to see if it was safe. This is probably not
5699 worthwhile. */
5700 if (RTL_EXPR_SEQUENCE (exp))
5701 return 0;
5703 exp_rtl = RTL_EXPR_RTL (exp);
5704 break;
5706 case WITH_CLEANUP_EXPR:
5707 exp_rtl = RTL_EXPR_RTL (exp);
5708 break;
5710 case CLEANUP_POINT_EXPR:
5711 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5713 case SAVE_EXPR:
5714 exp_rtl = SAVE_EXPR_RTL (exp);
5715 if (exp_rtl)
5716 break;
5718 /* If we've already scanned this, don't do it again. Otherwise,
5719 show we've scanned it and record for clearing the flag if we're
5720 going on. */
5721 if (TREE_PRIVATE (exp))
5722 return 1;
5724 TREE_PRIVATE (exp) = 1;
5725 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5727 TREE_PRIVATE (exp) = 0;
5728 return 0;
5731 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5732 return 1;
5734 case BIND_EXPR:
5735 /* The only operand we look at is operand 1. The rest aren't
5736 part of the expression. */
5737 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5739 case METHOD_CALL_EXPR:
5740 /* This takes an rtx argument, but shouldn't appear here. */
5741 abort ();
5743 default:
5744 break;
5747 /* If we have an rtx, we do not need to scan our operands. */
5748 if (exp_rtl)
5749 break;
5751 nops = first_rtl_op (TREE_CODE (exp));
5752 for (i = 0; i < nops; i++)
5753 if (TREE_OPERAND (exp, i) != 0
5754 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5755 return 0;
5757 /* If this is a language-specific tree code, it may require
5758 special handling. */
5759 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5760 && lang_safe_from_p
5761 && !(*lang_safe_from_p) (x, exp))
5762 return 0;
5765 /* If we have an rtl, find any enclosed object. Then see if we conflict
5766 with it. */
5767 if (exp_rtl)
5769 if (GET_CODE (exp_rtl) == SUBREG)
5771 exp_rtl = SUBREG_REG (exp_rtl);
5772 if (GET_CODE (exp_rtl) == REG
5773 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5774 return 0;
5777 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5778 are memory and they conflict. */
5779 return ! (rtx_equal_p (x, exp_rtl)
5780 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5781 && true_dependence (exp_rtl, GET_MODE (x), x,
5782 rtx_addr_varies_p)));
5785 /* If we reach here, it is safe. */
5786 return 1;
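/* Typical use of safe_from_p elsewhere in this file, shown here as a
   hedged sketch: a caller that wants to reuse TARGET for a subexpression
   first checks that the subexpression cannot refer to TARGET, e.g.

     if (target == 0 || ! safe_from_p (target, exp, 1)
         || GET_CODE (target) == PARALLEL)
       target = assign_temp (type, 0, 1, 1);

   A zero return merely forces the conservative path (a fresh temporary);
   it never causes wrong code, which is why this routine is free to give
   up and return zero whenever the analysis would get expensive.  */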
5789 /* Subroutine of expand_expr: return nonzero iff EXP is an
5790 expression whose type is statically determinable. */
5792 static int
5793 fixed_type_p (exp)
5794 tree exp;
5796 if (TREE_CODE (exp) == PARM_DECL
5797 || TREE_CODE (exp) == VAR_DECL
5798 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5799 || TREE_CODE (exp) == COMPONENT_REF
5800 || TREE_CODE (exp) == ARRAY_REF)
5801 return 1;
5802 return 0;
5805 /* Subroutine of expand_expr: return rtx if EXP is a
5806 variable or parameter; else return 0. */
5808 static rtx
5809 var_rtx (exp)
5810 tree exp;
5812 STRIP_NOPS (exp);
5813 switch (TREE_CODE (exp))
5815 case PARM_DECL:
5816 case VAR_DECL:
5817 return DECL_RTL (exp);
5818 default:
5819 return 0;
5823 #ifdef MAX_INTEGER_COMPUTATION_MODE
5824 void
5825 check_max_integer_computation_mode (exp)
5826 tree exp;
5828 enum tree_code code;
5829 enum machine_mode mode;
5831 /* Strip any NOPs that don't change the mode. */
5832 STRIP_NOPS (exp);
5833 code = TREE_CODE (exp);
5835 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5836 if (code == NOP_EXPR
5837 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5838 return;
5840 /* First check the type of the overall operation. We need only look at
5841 unary, binary and relational operations. */
5842 if (TREE_CODE_CLASS (code) == '1'
5843 || TREE_CODE_CLASS (code) == '2'
5844 || TREE_CODE_CLASS (code) == '<')
5846 mode = TYPE_MODE (TREE_TYPE (exp));
5847 if (GET_MODE_CLASS (mode) == MODE_INT
5848 && mode > MAX_INTEGER_COMPUTATION_MODE)
5849 fatal ("unsupported wide integer operation");
5852 /* Check operand of a unary op. */
5853 if (TREE_CODE_CLASS (code) == '1')
5855 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5856 if (GET_MODE_CLASS (mode) == MODE_INT
5857 && mode > MAX_INTEGER_COMPUTATION_MODE)
5858 fatal ("unsupported wide integer operation");
5861 /* Check operands of a binary/comparison op. */
5862 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5864 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5865 if (GET_MODE_CLASS (mode) == MODE_INT
5866 && mode > MAX_INTEGER_COMPUTATION_MODE)
5867 fatal ("unsupported wide integer operation");
5869 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5870 if (GET_MODE_CLASS (mode) == MODE_INT
5871 && mode > MAX_INTEGER_COMPUTATION_MODE)
5872 fatal ("unsupported wide integer operation");
5875 #endif
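/* Hedged example of how a target might use this hook: a port whose
   widest supported integer operations are SImode could define, in its
   target header (hypothetical),

     #define MAX_INTEGER_COMPUTATION_MODE SImode

   after which any unary, binary or comparison tree whose type or
   operands have a wider integral mode (e.g. DImode) is rejected above
   with "unsupported wide integer operation".  */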
5877 /* expand_expr: generate code for computing expression EXP.
5878 An rtx for the computed value is returned. The value is never null.
5879 In the case of a void EXP, const0_rtx is returned.
5881 The value may be stored in TARGET if TARGET is nonzero.
5882 TARGET is just a suggestion; callers must assume that
5883 the rtx returned may not be the same as TARGET.
5885 If TARGET is CONST0_RTX, it means that the value will be ignored.
5887 If TMODE is not VOIDmode, it suggests generating the
5888 result in mode TMODE. But this is done only when convenient.
5889 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5890 TMODE is just a suggestion; callers must assume that
5891 the rtx returned may not have mode TMODE.
5893 Note that TARGET may have neither TMODE nor MODE. In that case, it
5894 probably will not be used.
5896 If MODIFIER is EXPAND_SUM then when EXP is an addition
5897 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5898 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5899 products as above, or REG or MEM, or constant.
5900 Ordinarily in such cases we would output mul or add instructions
5901 and then return a pseudo reg containing the sum.
5903 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5904 it also marks a label as absolutely required (it can't be dead).
5905 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5906 This is used for outputting expressions used in initializers.
5908 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5909 with a constant address even if that address is not normally legitimate.
5910 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
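/* A hedged sketch of the usual calling pattern seen throughout this
   file: the caller passes a suggestion and must use whatever comes back,
   never assuming the result landed in TARGET or has mode TMODE.

     rtx op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
                            VOIDmode, EXPAND_NORMAL);
     if (op0 != target)
       ... use or copy OP0 as needed ...

   Passing const0_rtx as TARGET, as the `ignore' handling below does,
   tells expand_expr that only side effects matter.  */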
5913 expand_expr (exp, target, tmode, modifier)
5914 register tree exp;
5915 rtx target;
5916 enum machine_mode tmode;
5917 enum expand_modifier modifier;
5919 register rtx op0, op1, temp;
5920 tree type = TREE_TYPE (exp);
5921 int unsignedp = TREE_UNSIGNED (type);
5922 register enum machine_mode mode;
5923 register enum tree_code code = TREE_CODE (exp);
5924 optab this_optab;
5925 rtx subtarget, original_target;
5926 int ignore;
5927 tree context;
5928 /* Used by check-memory-usage to make modifier read only. */
5929 enum expand_modifier ro_modifier;
5931 /* Handle ERROR_MARK before anybody tries to access its type. */
5932 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5934 op0 = CONST0_RTX (tmode);
5935 if (op0 != 0)
5936 return op0;
5937 return const0_rtx;
5940 mode = TYPE_MODE (type);
5941 /* Use subtarget as the target for operand 0 of a binary operation. */
5942 subtarget = get_subtarget (target);
5943 original_target = target;
5944 ignore = (target == const0_rtx
5945 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5946 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5947 || code == COND_EXPR)
5948 && TREE_CODE (type) == VOID_TYPE));
5950 /* Make a read-only version of the modifier. */
5951 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5952 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5953 ro_modifier = modifier;
5954 else
5955 ro_modifier = EXPAND_NORMAL;
5957 /* If we are going to ignore this result, we need only do something
5958 if there is a side-effect somewhere in the expression. If there
5959 is, short-circuit the most common cases here. Note that we must
5960 not call expand_expr with anything but const0_rtx in case this
5961 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5963 if (ignore)
5965 if (! TREE_SIDE_EFFECTS (exp))
5966 return const0_rtx;
5968 /* Ensure we reference a volatile object even if value is ignored, but
5969 don't do this if all we are doing is taking its address. */
5970 if (TREE_THIS_VOLATILE (exp)
5971 && TREE_CODE (exp) != FUNCTION_DECL
5972 && mode != VOIDmode && mode != BLKmode
5973 && modifier != EXPAND_CONST_ADDRESS)
5975 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5976 if (GET_CODE (temp) == MEM)
5977 temp = copy_to_reg (temp);
5978 return const0_rtx;
5981 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5982 || code == INDIRECT_REF || code == BUFFER_REF)
5983 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5984 VOIDmode, ro_modifier);
5985 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5986 || code == ARRAY_REF)
5988 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5989 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5990 return const0_rtx;
5992 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5993 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5994 /* If the second operand has no side effects, just evaluate
5995 the first. */
5996 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5997 VOIDmode, ro_modifier);
5998 else if (code == BIT_FIELD_REF)
6000 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6001 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6002 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6003 return const0_rtx;
6006 target = 0;
6009 #ifdef MAX_INTEGER_COMPUTATION_MODE
6010 /* Only check stuff here if the mode we want is different from the mode
6011 of the expression; if it's the same, check_max_integer_computation_mode
6012 will handle it. Do we really need to check this stuff at all? */
6014 if (target
6015 && GET_MODE (target) != mode
6016 && TREE_CODE (exp) != INTEGER_CST
6017 && TREE_CODE (exp) != PARM_DECL
6018 && TREE_CODE (exp) != ARRAY_REF
6019 && TREE_CODE (exp) != COMPONENT_REF
6020 && TREE_CODE (exp) != BIT_FIELD_REF
6021 && TREE_CODE (exp) != INDIRECT_REF
6022 && TREE_CODE (exp) != CALL_EXPR
6023 && TREE_CODE (exp) != VAR_DECL
6024 && TREE_CODE (exp) != RTL_EXPR)
6026 enum machine_mode mode = GET_MODE (target);
6028 if (GET_MODE_CLASS (mode) == MODE_INT
6029 && mode > MAX_INTEGER_COMPUTATION_MODE)
6030 fatal ("unsupported wide integer operation");
6033 if (tmode != mode
6034 && TREE_CODE (exp) != INTEGER_CST
6035 && TREE_CODE (exp) != PARM_DECL
6036 && TREE_CODE (exp) != ARRAY_REF
6037 && TREE_CODE (exp) != COMPONENT_REF
6038 && TREE_CODE (exp) != BIT_FIELD_REF
6039 && TREE_CODE (exp) != INDIRECT_REF
6040 && TREE_CODE (exp) != VAR_DECL
6041 && TREE_CODE (exp) != CALL_EXPR
6042 && TREE_CODE (exp) != RTL_EXPR
6043 && GET_MODE_CLASS (tmode) == MODE_INT
6044 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6045 fatal ("unsupported wide integer operation");
6047 check_max_integer_computation_mode (exp);
6048 #endif
6050 /* If will do cse, generate all results into pseudo registers
6051 since 1) that allows cse to find more things
6052 and 2) otherwise cse could produce an insn the machine
6053 cannot support. */
6055 if (! cse_not_expected && mode != BLKmode && target
6056 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6057 target = subtarget;
6059 switch (code)
6061 case LABEL_DECL:
6063 tree function = decl_function_context (exp);
6064 /* Handle using a label in a containing function. */
6065 if (function != current_function_decl
6066 && function != inline_function_decl && function != 0)
6068 struct function *p = find_function_data (function);
6069 p->expr->x_forced_labels
6070 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6071 p->expr->x_forced_labels);
6073 else
6075 if (modifier == EXPAND_INITIALIZER)
6076 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6077 label_rtx (exp),
6078 forced_labels);
6081 temp = gen_rtx_MEM (FUNCTION_MODE,
6082 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6083 if (function != current_function_decl
6084 && function != inline_function_decl && function != 0)
6085 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6086 return temp;
6089 case PARM_DECL:
6090 if (DECL_RTL (exp) == 0)
6092 error_with_decl (exp, "prior parameter's size depends on `%s'");
6093 return CONST0_RTX (mode);
6096 /* ... fall through ... */
6098 case VAR_DECL:
6099 /* If a static var's type was incomplete when the decl was written,
6100 but the type is complete now, lay out the decl now. */
6101 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6102 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6104 layout_decl (exp, 0);
6105 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6108 /* Although static-storage variables start off initialized, according to
6109 ANSI C, a memcpy could overwrite them with uninitialized values. So
6110 we check them too. This also lets us check for read-only variables
6111 accessed via a non-const declaration, in case it won't be detected
6112 any other way (e.g., in an embedded system or OS kernel without
6113 memory protection).
6115 Aggregates are not checked here; they're handled elsewhere. */
6116 if (cfun && current_function_check_memory_usage
6117 && code == VAR_DECL
6118 && GET_CODE (DECL_RTL (exp)) == MEM
6119 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6121 enum memory_use_mode memory_usage;
6122 memory_usage = get_memory_usage_from_modifier (modifier);
6124 in_check_memory_usage = 1;
6125 if (memory_usage != MEMORY_USE_DONT)
6126 emit_library_call (chkr_check_addr_libfunc,
6127 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6128 XEXP (DECL_RTL (exp), 0), Pmode,
6129 GEN_INT (int_size_in_bytes (type)),
6130 TYPE_MODE (sizetype),
6131 GEN_INT (memory_usage),
6132 TYPE_MODE (integer_type_node));
6133 in_check_memory_usage = 0;
6136 /* ... fall through ... */
6138 case FUNCTION_DECL:
6139 case RESULT_DECL:
6140 if (DECL_RTL (exp) == 0)
6141 abort ();
6143 /* Ensure the variable is marked as used even if it doesn't go
6144 through a parser. If it hasn't been used yet, write out an
6145 external definition. */
6146 if (! TREE_USED (exp))
6148 assemble_external (exp);
6149 TREE_USED (exp) = 1;
6152 /* Show we haven't gotten RTL for this yet. */
6153 temp = 0;
6155 /* Handle variables inherited from containing functions. */
6156 context = decl_function_context (exp);
6158 /* We treat inline_function_decl as an alias for the current function
6159 because that is the inline function whose vars, types, etc.
6160 are being merged into the current function.
6161 See expand_inline_function. */
6163 if (context != 0 && context != current_function_decl
6164 && context != inline_function_decl
6165 /* If var is static, we don't need a static chain to access it. */
6166 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6167 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6169 rtx addr;
6171 /* Mark as non-local and addressable. */
6172 DECL_NONLOCAL (exp) = 1;
6173 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6174 abort ();
6175 mark_addressable (exp);
6176 if (GET_CODE (DECL_RTL (exp)) != MEM)
6177 abort ();
6178 addr = XEXP (DECL_RTL (exp), 0);
6179 if (GET_CODE (addr) == MEM)
6180 addr = change_address (addr, Pmode,
6181 fix_lexical_addr (XEXP (addr, 0), exp));
6182 else
6183 addr = fix_lexical_addr (addr, exp);
6185 temp = change_address (DECL_RTL (exp), mode, addr);
6188 /* This is the case of an array whose size is to be determined
6189 from its initializer, while the initializer is still being parsed.
6190 See expand_decl. */
6192 else if (GET_CODE (DECL_RTL (exp)) == MEM
6193 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6194 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6195 XEXP (DECL_RTL (exp), 0));
6197 /* If DECL_RTL is memory, we are in the normal case and either
6198 the address is not valid or it is not a register and -fforce-addr
6199 is specified, get the address into a register. */
6201 else if (GET_CODE (DECL_RTL (exp)) == MEM
6202 && modifier != EXPAND_CONST_ADDRESS
6203 && modifier != EXPAND_SUM
6204 && modifier != EXPAND_INITIALIZER
6205 && (! memory_address_p (DECL_MODE (exp),
6206 XEXP (DECL_RTL (exp), 0))
6207 || (flag_force_addr
6208 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6209 temp = change_address (DECL_RTL (exp), VOIDmode,
6210 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6212 /* If we got something, return it. But first, set the alignment
6213 if the address is a register. */
6214 if (temp != 0)
6216 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6217 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6219 return temp;
6222 /* If the mode of DECL_RTL does not match that of the decl, it
6223 must be a promoted value. We return a SUBREG of the wanted mode,
6224 but mark it so that we know that it was already extended. */
6226 if (GET_CODE (DECL_RTL (exp)) == REG
6227 && GET_MODE (DECL_RTL (exp)) != mode)
6229 /* Get the signedness used for this variable. Ensure we get the
6230 same mode we got when the variable was declared. */
6231 if (GET_MODE (DECL_RTL (exp))
6232 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6233 abort ();
6235 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6236 SUBREG_PROMOTED_VAR_P (temp) = 1;
6237 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6238 return temp;
6241 return DECL_RTL (exp);
6243 case INTEGER_CST:
6244 return immed_double_const (TREE_INT_CST_LOW (exp),
6245 TREE_INT_CST_HIGH (exp), mode);
6247 case CONST_DECL:
6248 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6249 EXPAND_MEMORY_USE_BAD);
6251 case REAL_CST:
6252 /* If optimized, generate immediate CONST_DOUBLE
6253 which will be turned into memory by reload if necessary.
6255 We used to force a register so that loop.c could see it. But
6256 this does not allow gen_* patterns to perform optimizations with
6257 the constants. It also produces two insns in cases like "x = 1.0;".
6258 On most machines, floating-point constants are not permitted in
6259 many insns, so we'd end up copying it to a register in any case.
6261 Now, we do the copying in expand_binop, if appropriate. */
6262 return immed_real_const (exp);
6264 case COMPLEX_CST:
6265 case STRING_CST:
6266 if (! TREE_CST_RTL (exp))
6267 output_constant_def (exp, 1);
6269 /* TREE_CST_RTL probably contains a constant address.
6270 On RISC machines where a constant address isn't valid,
6271 make some insns to get that address into a register. */
6272 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6273 && modifier != EXPAND_CONST_ADDRESS
6274 && modifier != EXPAND_INITIALIZER
6275 && modifier != EXPAND_SUM
6276 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6277 || (flag_force_addr
6278 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6279 return change_address (TREE_CST_RTL (exp), VOIDmode,
6280 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6281 return TREE_CST_RTL (exp);
6283 case EXPR_WITH_FILE_LOCATION:
6285 rtx to_return;
6286 const char *saved_input_filename = input_filename;
6287 int saved_lineno = lineno;
6288 input_filename = EXPR_WFL_FILENAME (exp);
6289 lineno = EXPR_WFL_LINENO (exp);
6290 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6291 emit_line_note (input_filename, lineno);
6292 /* Possibly avoid switching back and forth here. */
6293 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6294 input_filename = saved_input_filename;
6295 lineno = saved_lineno;
6296 return to_return;
6299 case SAVE_EXPR:
6300 context = decl_function_context (exp);
6302 /* If this SAVE_EXPR was at global context, assume we are an
6303 initialization function and move it into our context. */
6304 if (context == 0)
6305 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6307 /* We treat inline_function_decl as an alias for the current function
6308 because that is the inline function whose vars, types, etc.
6309 are being merged into the current function.
6310 See expand_inline_function. */
6311 if (context == current_function_decl || context == inline_function_decl)
6312 context = 0;
6314 /* If this is non-local, handle it. */
6315 if (context)
6317 /* The following call just exists to abort if the context is
6318 not of a containing function. */
6319 find_function_data (context);
6321 temp = SAVE_EXPR_RTL (exp);
6322 if (temp && GET_CODE (temp) == REG)
6324 put_var_into_stack (exp);
6325 temp = SAVE_EXPR_RTL (exp);
6327 if (temp == 0 || GET_CODE (temp) != MEM)
6328 abort ();
6329 return change_address (temp, mode,
6330 fix_lexical_addr (XEXP (temp, 0), exp));
6332 if (SAVE_EXPR_RTL (exp) == 0)
6334 if (mode == VOIDmode)
6335 temp = const0_rtx;
6336 else
6337 temp = assign_temp (build_qualified_type (type,
6338 (TYPE_QUALS (type)
6339 | TYPE_QUAL_CONST)),
6340 3, 0, 0);
6342 SAVE_EXPR_RTL (exp) = temp;
6343 if (!optimize && GET_CODE (temp) == REG)
6344 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6345 save_expr_regs);
6347 /* If the mode of TEMP does not match that of the expression, it
6348 must be a promoted value. We pass store_expr a SUBREG of the
6349 wanted mode but mark it so that we know that it was already
6350 extended. Note that `unsignedp' was modified above in
6351 this case. */
6353 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6355 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6356 SUBREG_PROMOTED_VAR_P (temp) = 1;
6357 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6360 if (temp == const0_rtx)
6361 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6362 EXPAND_MEMORY_USE_BAD);
6363 else
6364 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6366 TREE_USED (exp) = 1;
6369 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6370 must be a promoted value. We return a SUBREG of the wanted mode,
6371 but mark it so that we know that it was already extended. */
6373 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6374 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6376 /* Compute the signedness and make the proper SUBREG. */
6377 promote_mode (type, mode, &unsignedp, 0);
6378 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6379 SUBREG_PROMOTED_VAR_P (temp) = 1;
6380 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6381 return temp;
6384 return SAVE_EXPR_RTL (exp);
6386 case UNSAVE_EXPR:
6388 rtx temp;
6389 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6390 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6391 return temp;
6394 case PLACEHOLDER_EXPR:
6396 tree placeholder_expr;
6398 /* If there is an object at the head of the placeholder list,
6399 see if any object in it is of type TYPE or a pointer to it. For
6400 further information, see tree.def. */
6401 for (placeholder_expr = placeholder_list;
6402 placeholder_expr != 0;
6403 placeholder_expr = TREE_CHAIN (placeholder_expr))
6405 tree need_type = TYPE_MAIN_VARIANT (type);
6406 tree object = 0;
6407 tree old_list = placeholder_list;
6408 tree elt;
6410 /* Find the outermost reference that is of the type we want.
6411 If none, see if any object has a type that is a pointer to
6412 the type we want. */
6413 for (elt = TREE_PURPOSE (placeholder_expr);
6414 elt != 0 && object == 0;
6416 = ((TREE_CODE (elt) == COMPOUND_EXPR
6417 || TREE_CODE (elt) == COND_EXPR)
6418 ? TREE_OPERAND (elt, 1)
6419 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6420 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6421 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6422 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6423 ? TREE_OPERAND (elt, 0) : 0))
6424 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6425 object = elt;
6427 for (elt = TREE_PURPOSE (placeholder_expr);
6428 elt != 0 && object == 0;
6430 = ((TREE_CODE (elt) == COMPOUND_EXPR
6431 || TREE_CODE (elt) == COND_EXPR)
6432 ? TREE_OPERAND (elt, 1)
6433 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6434 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6435 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6436 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6437 ? TREE_OPERAND (elt, 0) : 0))
6438 if (POINTER_TYPE_P (TREE_TYPE (elt))
6439 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6440 == need_type))
6441 object = build1 (INDIRECT_REF, need_type, elt);
6443 if (object != 0)
6445 /* Expand this object skipping the list entries before
6446 it was found in case it is also a PLACEHOLDER_EXPR.
6447 In that case, we want to translate it using subsequent
6448 entries. */
6449 placeholder_list = TREE_CHAIN (placeholder_expr);
6450 temp = expand_expr (object, original_target, tmode,
6451 ro_modifier);
6452 placeholder_list = old_list;
6453 return temp;
6458 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6459 abort ();
6461 case WITH_RECORD_EXPR:
6462 /* Put the object on the placeholder list, expand our first operand,
6463 and pop the list. */
6464 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6465 placeholder_list);
6466 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6467 tmode, ro_modifier);
6468 placeholder_list = TREE_CHAIN (placeholder_list);
6469 return target;
6471 case GOTO_EXPR:
6472 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6473 expand_goto (TREE_OPERAND (exp, 0));
6474 else
6475 expand_computed_goto (TREE_OPERAND (exp, 0));
6476 return const0_rtx;
6478 case EXIT_EXPR:
6479 expand_exit_loop_if_false (NULL_PTR,
6480 invert_truthvalue (TREE_OPERAND (exp, 0)));
6481 return const0_rtx;
6483 case LABELED_BLOCK_EXPR:
6484 if (LABELED_BLOCK_BODY (exp))
6485 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6486 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6487 return const0_rtx;
6489 case EXIT_BLOCK_EXPR:
6490 if (EXIT_BLOCK_RETURN (exp))
6491 sorry ("returned value in block_exit_expr");
6492 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6493 return const0_rtx;
6495 case LOOP_EXPR:
6496 push_temp_slots ();
6497 expand_start_loop (1);
6498 expand_expr_stmt (TREE_OPERAND (exp, 0));
6499 expand_end_loop ();
6500 pop_temp_slots ();
6502 return const0_rtx;
6504 case BIND_EXPR:
6506 tree vars = TREE_OPERAND (exp, 0);
6507 int vars_need_expansion = 0;
6509 /* Need to open a binding contour here because
6510 if there are any cleanups they must be contained here. */
6511 expand_start_bindings (2);
6513 /* Mark the corresponding BLOCK for output in its proper place. */
6514 if (TREE_OPERAND (exp, 2) != 0
6515 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6516 insert_block (TREE_OPERAND (exp, 2));
6518 /* If VARS have not yet been expanded, expand them now. */
6519 while (vars)
6521 if (DECL_RTL (vars) == 0)
6523 vars_need_expansion = 1;
6524 expand_decl (vars);
6526 expand_decl_init (vars);
6527 vars = TREE_CHAIN (vars);
6530 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6532 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6534 return temp;
6537 case RTL_EXPR:
6538 if (RTL_EXPR_SEQUENCE (exp))
6540 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6541 abort ();
6542 emit_insns (RTL_EXPR_SEQUENCE (exp));
6543 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6545 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6546 free_temps_for_rtl_expr (exp);
6547 return RTL_EXPR_RTL (exp);
6549 case CONSTRUCTOR:
6550 /* If we don't need the result, just ensure we evaluate any
6551 subexpressions. */
6552 if (ignore)
6554 tree elt;
6555 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6556 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6557 EXPAND_MEMORY_USE_BAD);
6558 return const0_rtx;
6561 /* All elts simple constants => refer to a constant in memory. But
6562 if this is a non-BLKmode mode, let it store a field at a time
6563 since that should make a CONST_INT or CONST_DOUBLE when we
6564 fold. Likewise, if we have a target we can use, it is best to
6565 store directly into the target unless the type is large enough
6566 that memcpy will be used. If we are making an initializer and
6567 all operands are constant, put it in memory as well. */
6568 else if ((TREE_STATIC (exp)
6569 && ((mode == BLKmode
6570 && ! (target != 0 && safe_from_p (target, exp, 1)))
6571 || TREE_ADDRESSABLE (exp)
6572 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6573 && (! MOVE_BY_PIECES_P
6574 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6575 TYPE_ALIGN (type)))
6576 && ! mostly_zeros_p (exp))))
6577 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6579 rtx constructor = output_constant_def (exp, 1);
6581 if (modifier != EXPAND_CONST_ADDRESS
6582 && modifier != EXPAND_INITIALIZER
6583 && modifier != EXPAND_SUM
6584 && (! memory_address_p (GET_MODE (constructor),
6585 XEXP (constructor, 0))
6586 || (flag_force_addr
6587 && GET_CODE (XEXP (constructor, 0)) != REG)))
6588 constructor = change_address (constructor, VOIDmode,
6589 XEXP (constructor, 0));
6590 return constructor;
6592 else
6594 /* Handle calls that pass values in multiple non-contiguous
6595 locations. The Irix 6 ABI has examples of this. */
6596 if (target == 0 || ! safe_from_p (target, exp, 1)
6597 || GET_CODE (target) == PARALLEL)
6598 target
6599 = assign_temp (build_qualified_type (type,
6600 (TYPE_QUALS (type)
6601 | (TREE_READONLY (exp)
6602 * TYPE_QUAL_CONST))),
6603 TREE_ADDRESSABLE (exp), 1, 1);
6605 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6606 int_size_in_bytes (TREE_TYPE (exp)));
6607 return target;
6610 case INDIRECT_REF:
6612 tree exp1 = TREE_OPERAND (exp, 0);
6613 tree index;
6614 tree string = string_constant (exp1, &index);
6616 /* Try to optimize reads from const strings. */
6617 if (string
6618 && TREE_CODE (string) == STRING_CST
6619 && TREE_CODE (index) == INTEGER_CST
6620 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6621 && GET_MODE_CLASS (mode) == MODE_INT
6622 && GET_MODE_SIZE (mode) == 1
6623 && modifier != EXPAND_MEMORY_USE_WO)
6624 return
6625 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6627 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6628 op0 = memory_address (mode, op0);
6630 if (cfun && current_function_check_memory_usage
6631 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6633 enum memory_use_mode memory_usage;
6634 memory_usage = get_memory_usage_from_modifier (modifier);
6636 if (memory_usage != MEMORY_USE_DONT)
6638 in_check_memory_usage = 1;
6639 emit_library_call (chkr_check_addr_libfunc,
6640 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6641 Pmode, GEN_INT (int_size_in_bytes (type)),
6642 TYPE_MODE (sizetype),
6643 GEN_INT (memory_usage),
6644 TYPE_MODE (integer_type_node));
6645 in_check_memory_usage = 0;
6649 temp = gen_rtx_MEM (mode, op0);
6650 set_mem_attributes (temp, exp, 0);
6652 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6653 here, because, in C and C++, the fact that a location is accessed
6654 through a pointer to const does not mean that the value there can
6655 never change. Languages where it can never change should
6656 also set TREE_STATIC. */
6657 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6659 /* If we are writing to this object and its type is a record with
6660 readonly fields, we must mark it as readonly so it will
6661 conflict with readonly references to those fields. */
6662 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6663 RTX_UNCHANGING_P (temp) = 1;
6665 return temp;
6668 case ARRAY_REF:
6669 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6670 abort ();
6673 tree array = TREE_OPERAND (exp, 0);
6674 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6675 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6676 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6677 HOST_WIDE_INT i;
6679 /* Optimize the special-case of a zero lower bound.
6681 We convert the low_bound to sizetype to avoid some problems
6682 with constant folding. (E.g. suppose the lower bound is 1,
6683 and its mode is QI. Without the conversion, (ARRAY
6684 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6685 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6687 if (! integer_zerop (low_bound))
6688 index = size_diffop (index, convert (sizetype, low_bound));
6690 /* Fold an expression like: "foo"[2].
6691 This is not done in fold so it won't happen inside &.
6692 Don't fold if this is for wide characters since it's too
6693 difficult to do correctly and this is a very rare case. */
6695 if (TREE_CODE (array) == STRING_CST
6696 && TREE_CODE (index) == INTEGER_CST
6697 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6698 && GET_MODE_CLASS (mode) == MODE_INT
6699 && GET_MODE_SIZE (mode) == 1)
6700 return
6701 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6703 /* If this is a constant index into a constant array,
6704 just get the value from the array. Handle both the cases when
6705 we have an explicit constructor and when our operand is a variable
6706 that was declared const. */
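/* Illustrative example, with a hypothetical declaration: given

     static const int tbl[3] = {10, 20, 30};

   an access such as tbl[1] can, when optimizing, be expanded straight
   from DECL_INITIAL to (const_int 20) by the code below; string accesses
   like "foo"[2] were already folded just above.  */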
6708 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6709 && TREE_CODE (index) == INTEGER_CST
6710 && 0 > compare_tree_int (index,
6711 list_length (CONSTRUCTOR_ELTS
6712 (TREE_OPERAND (exp, 0)))))
6714 tree elem;
6716 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6717 i = TREE_INT_CST_LOW (index);
6718 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6721 if (elem)
6722 return expand_expr (fold (TREE_VALUE (elem)), target,
6723 tmode, ro_modifier);
6726 else if (optimize >= 1
6727 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6728 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6729 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6731 if (TREE_CODE (index) == INTEGER_CST)
6733 tree init = DECL_INITIAL (array);
6735 if (TREE_CODE (init) == CONSTRUCTOR)
6737 tree elem;
6739 for (elem = CONSTRUCTOR_ELTS (init);
6740 (elem
6741 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6742 elem = TREE_CHAIN (elem))
6745 if (elem)
6746 return expand_expr (fold (TREE_VALUE (elem)), target,
6747 tmode, ro_modifier);
6749 else if (TREE_CODE (init) == STRING_CST
6750 && 0 > compare_tree_int (index,
6751 TREE_STRING_LENGTH (init)))
6753 tree type = TREE_TYPE (TREE_TYPE (init));
6754 enum machine_mode mode = TYPE_MODE (type);
6756 if (GET_MODE_CLASS (mode) == MODE_INT
6757 && GET_MODE_SIZE (mode) == 1)
6758 return (GEN_INT
6759 (TREE_STRING_POINTER
6760 (init)[TREE_INT_CST_LOW (index)]));
6765 /* Fall through. */
6767 case COMPONENT_REF:
6768 case BIT_FIELD_REF:
6769 /* If the operand is a CONSTRUCTOR, we can just extract the
6770 appropriate field if it is present. Don't do this if we have
6771 already written the data since we want to refer to that copy
6772 and varasm.c assumes that's what we'll do. */
6773 if (code != ARRAY_REF
6774 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6775 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6777 tree elt;
6779 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6780 elt = TREE_CHAIN (elt))
6781 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6782 /* We can normally use the value of the field in the
6783 CONSTRUCTOR. However, if this is a bitfield in
6784 an integral mode that we can fit in a HOST_WIDE_INT,
6785 we must mask only the number of bits in the bitfield,
6786 since this is done implicitly by the constructor. If
6787 the bitfield does not meet either of those conditions,
6788 we can't do this optimization. */
6789 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6790 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6791 == MODE_INT)
6792 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6793 <= HOST_BITS_PER_WIDE_INT))))
6795 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6796 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6798 HOST_WIDE_INT bitsize
6799 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6801 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6803 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6804 op0 = expand_and (op0, op1, target);
6806 else
6808 enum machine_mode imode
6809 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6810 tree count
6811 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6814 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6815 target, 0);
6816 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6817 target, 0);
6821 return op0;
6826 enum machine_mode mode1;
6827 HOST_WIDE_INT bitsize, bitpos;
6828 tree offset;
6829 int volatilep = 0;
6830 unsigned int alignment;
6831 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6832 &mode1, &unsignedp, &volatilep,
6833 &alignment);
6835 /* If we got back the original object, something is wrong. Perhaps
6836 we are evaluating an expression too early. In any event, don't
6837 infinitely recurse. */
6838 if (tem == exp)
6839 abort ();
6841 /* If TEM's type is a union of variable size, pass TARGET to the inner
6842 computation, since it will need a temporary and TARGET is known
6843 to suffice. This occurs in unchecked conversion in Ada. */
6845 op0 = expand_expr (tem,
6846 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6847 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6848 != INTEGER_CST)
6849 ? target : NULL_RTX),
6850 VOIDmode,
6851 (modifier == EXPAND_INITIALIZER
6852 || modifier == EXPAND_CONST_ADDRESS)
6853 ? modifier : EXPAND_NORMAL);
6855 /* If this is a constant, put it into a register if it is a
6856 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6857 if (CONSTANT_P (op0))
6859 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6860 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6861 && offset == 0)
6862 op0 = force_reg (mode, op0);
6863 else
6864 op0 = validize_mem (force_const_mem (mode, op0));
6867 if (offset != 0)
6869 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6871 /* If this object is in memory, put it into a register.
6872 This case can't occur in C, but can in Ada if we have
6873 unchecked conversion of an expression from a scalar type to
6874 an array or record type. */
6875 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6876 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6878 tree nt = build_qualified_type (TREE_TYPE (tem),
6879 (TYPE_QUALS (TREE_TYPE (tem))
6880 | TYPE_QUAL_CONST));
6881 rtx memloc = assign_temp (nt, 1, 1, 1);
6883 mark_temp_addr_taken (memloc);
6884 emit_move_insn (memloc, op0);
6885 op0 = memloc;
6888 if (GET_CODE (op0) != MEM)
6889 abort ();
6891 if (GET_MODE (offset_rtx) != ptr_mode)
6893 #ifdef POINTERS_EXTEND_UNSIGNED
6894 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6895 #else
6896 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6897 #endif
6900 /* A constant address in OP0 can have VOIDmode; we must not try
6901 to call force_reg in that case, so avoid it. */
6902 if (GET_CODE (op0) == MEM
6903 && GET_MODE (op0) == BLKmode
6904 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6905 && bitsize != 0
6906 && (bitpos % bitsize) == 0
6907 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6908 && alignment == GET_MODE_ALIGNMENT (mode1))
6910 rtx temp = change_address (op0, mode1,
6911 plus_constant (XEXP (op0, 0),
6912 (bitpos /
6913 BITS_PER_UNIT)));
6914 if (GET_CODE (XEXP (temp, 0)) == REG)
6915 op0 = temp;
6916 else
6917 op0 = change_address (op0, mode1,
6918 force_reg (GET_MODE (XEXP (temp, 0)),
6919 XEXP (temp, 0)));
6920 bitpos = 0;
6923 op0 = change_address (op0, VOIDmode,
6924 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6925 force_reg (ptr_mode,
6926 offset_rtx)));
6929 /* Don't forget about volatility even if this is a bitfield. */
6930 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6932 op0 = copy_rtx (op0);
6933 MEM_VOLATILE_P (op0) = 1;
6936 /* Check the access. */
6937 if (cfun != 0 && current_function_check_memory_usage
6938 && GET_CODE (op0) == MEM)
6940 enum memory_use_mode memory_usage;
6941 memory_usage = get_memory_usage_from_modifier (modifier);
6943 if (memory_usage != MEMORY_USE_DONT)
6945 rtx to;
6946 int size;
6948 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6949 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6951 /* Check the access right of the pointer. */
6952 in_check_memory_usage = 1;
6953 if (size > BITS_PER_UNIT)
6954 emit_library_call (chkr_check_addr_libfunc,
6955 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6956 Pmode, GEN_INT (size / BITS_PER_UNIT),
6957 TYPE_MODE (sizetype),
6958 GEN_INT (memory_usage),
6959 TYPE_MODE (integer_type_node));
6960 in_check_memory_usage = 0;
6964 /* In cases where an aligned union has an unaligned object
6965 as a field, we might be extracting a BLKmode value from
6966 an integer-mode (e.g., SImode) object. Handle this case
6967 by doing the extract into an object as wide as the field
6968 (which we know to be the width of a basic mode), then
6969 storing into memory, and changing the mode to BLKmode.
6970 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6971 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6972 if (mode1 == VOIDmode
6973 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6974 || (modifier != EXPAND_CONST_ADDRESS
6975 && modifier != EXPAND_INITIALIZER
6976 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6977 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6978 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6979 /* If the field isn't aligned enough to fetch as a memref,
6980 fetch it as a bit field. */
6981 || (mode1 != BLKmode
6982 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6983 && ((TYPE_ALIGN (TREE_TYPE (tem))
6984 < GET_MODE_ALIGNMENT (mode))
6985 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6986 /* If the type and the field are a constant size and the
6987 size of the type isn't the same size as the bitfield,
6988 we must use bitfield operations. */
6989 || ((bitsize >= 0
6990 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6991 == INTEGER_CST)
6992 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6993 bitsize)))))
6994 || (modifier != EXPAND_CONST_ADDRESS
6995 && modifier != EXPAND_INITIALIZER
6996 && mode == BLKmode
6997 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6998 && (TYPE_ALIGN (type) > alignment
6999 || bitpos % TYPE_ALIGN (type) != 0)))
7001 enum machine_mode ext_mode = mode;
7003 if (ext_mode == BLKmode
7004 && ! (target != 0 && GET_CODE (op0) == MEM
7005 && GET_CODE (target) == MEM
7006 && bitpos % BITS_PER_UNIT == 0))
7007 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7009 if (ext_mode == BLKmode)
7011 /* In this case, BITPOS must start at a byte boundary and
7012 TARGET, if specified, must be a MEM. */
7013 if (GET_CODE (op0) != MEM
7014 || (target != 0 && GET_CODE (target) != MEM)
7015 || bitpos % BITS_PER_UNIT != 0)
7016 abort ();
7018 op0 = change_address (op0, VOIDmode,
7019 plus_constant (XEXP (op0, 0),
7020 bitpos / BITS_PER_UNIT));
7021 if (target == 0)
7022 target = assign_temp (type, 0, 1, 1);
7024 emit_block_move (target, op0,
7025 bitsize == -1 ? expr_size (exp)
7026 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7027 / BITS_PER_UNIT),
7028 BITS_PER_UNIT);
7030 return target;
7033 op0 = validize_mem (op0);
7035 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7036 mark_reg_pointer (XEXP (op0, 0), alignment);
7038 op0 = extract_bit_field (op0, bitsize, bitpos,
7039 unsignedp, target, ext_mode, ext_mode,
7040 alignment,
7041 int_size_in_bytes (TREE_TYPE (tem)));
7043 /* If the result is a record type and BITSIZE is narrower than
7044 the mode of OP0, an integral mode, and this is a big endian
7045 machine, we must put the field into the high-order bits. */
7046 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7047 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7048 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7049 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7050 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7051 - bitsize),
7052 op0, 1);
7054 if (mode == BLKmode)
7056 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7057 TYPE_QUAL_CONST);
7058 rtx new = assign_temp (nt, 0, 1, 1);
7060 emit_move_insn (new, op0);
7061 op0 = copy_rtx (new);
7062 PUT_MODE (op0, BLKmode);
7065 return op0;
7068 /* If the result is BLKmode, use that to access the object
7069 now as well. */
7070 if (mode == BLKmode)
7071 mode1 = BLKmode;
7073 /* Get a reference to just this component. */
7074 if (modifier == EXPAND_CONST_ADDRESS
7075 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7077 rtx new = gen_rtx_MEM (mode1,
7078 plus_constant (XEXP (op0, 0),
7079 (bitpos / BITS_PER_UNIT)));
7081 MEM_COPY_ATTRIBUTES (new, op0);
7082 op0 = new;
7084 else
7085 op0 = change_address (op0, mode1,
7086 plus_constant (XEXP (op0, 0),
7087 (bitpos / BITS_PER_UNIT)));
7089 set_mem_attributes (op0, exp, 0);
7090 if (GET_CODE (XEXP (op0, 0)) == REG)
7091 mark_reg_pointer (XEXP (op0, 0), alignment);
7093 MEM_VOLATILE_P (op0) |= volatilep;
7094 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7095 || modifier == EXPAND_CONST_ADDRESS
7096 || modifier == EXPAND_INITIALIZER)
7097 return op0;
7098 else if (target == 0)
7099 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7101 convert_move (target, op0, unsignedp);
7102 return target;
7105 /* Intended for a reference to a buffer of a file-object in Pascal.
7106 But it's not certain that a special tree code will really be
7107 necessary for these. INDIRECT_REF might work for them. */
7108 case BUFFER_REF:
7109 abort ();
7111 case IN_EXPR:
7113 /* Pascal set IN expression.
7115 Algorithm:
7116 rlo = set_low - (set_low%bits_per_word);
7117 the_word = set [ (index - rlo)/bits_per_word ];
7118 bit_index = index % bits_per_word;
7119 bitmask = 1 << bit_index;
7120 return !!(the_word & bitmask); */
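/* Worked example following the pseudo-code above (hypothetical values,
   bits_per_word = 8): for set_low = 3 and index = 10,
     rlo       = 3 - (3 % 8)       = 0
     the_word  = set[(10 - 0) / 8] = set[1]
     bit_index = 10 % 8            = 2
     bitmask   = 1 << 2            = 4
   and the result is nonzero iff bit 2 of the second byte of the set
   is set.  */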
7122 tree set = TREE_OPERAND (exp, 0);
7123 tree index = TREE_OPERAND (exp, 1);
7124 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7125 tree set_type = TREE_TYPE (set);
7126 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7127 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7128 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7129 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7130 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7131 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7132 rtx setaddr = XEXP (setval, 0);
7133 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7134 rtx rlow;
7135 rtx diff, quo, rem, addr, bit, result;
7137 /* If domain is empty, answer is no. Likewise if index is constant
7138 and out of bounds. */
7139 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7140 && TREE_CODE (set_low_bound) == INTEGER_CST
7141 && tree_int_cst_lt (set_high_bound, set_low_bound))
7142 || (TREE_CODE (index) == INTEGER_CST
7143 && TREE_CODE (set_low_bound) == INTEGER_CST
7144 && tree_int_cst_lt (index, set_low_bound))
7145 || (TREE_CODE (set_high_bound) == INTEGER_CST
7146 && TREE_CODE (index) == INTEGER_CST
7147 && tree_int_cst_lt (set_high_bound, index))))
7148 return const0_rtx;
7150 if (target == 0)
7151 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7153 /* If we get here, we have to generate the code for both cases
7154 (in range and out of range). */
7156 op0 = gen_label_rtx ();
7157 op1 = gen_label_rtx ();
7159 if (! (GET_CODE (index_val) == CONST_INT
7160 && GET_CODE (lo_r) == CONST_INT))
7162 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7163 GET_MODE (index_val), iunsignedp, 0, op1);
7166 if (! (GET_CODE (index_val) == CONST_INT
7167 && GET_CODE (hi_r) == CONST_INT))
7169 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7170 GET_MODE (index_val), iunsignedp, 0, op1);
7173 /* Calculate the element number of bit zero in the first word
7174 of the set. */
7175 if (GET_CODE (lo_r) == CONST_INT)
7176 rlow = GEN_INT (INTVAL (lo_r)
7177 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7178 else
7179 rlow = expand_binop (index_mode, and_optab, lo_r,
7180 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7181 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7183 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7184 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7186 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7187 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7188 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7189 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7191 addr = memory_address (byte_mode,
7192 expand_binop (index_mode, add_optab, diff,
7193 setaddr, NULL_RTX, iunsignedp,
7194 OPTAB_LIB_WIDEN));
7196 /* Extract the bit we want to examine. */
7197 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7198 gen_rtx_MEM (byte_mode, addr),
7199 make_tree (TREE_TYPE (index), rem),
7200 NULL_RTX, 1);
7201 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7202 GET_MODE (target) == byte_mode ? target : 0,
7203 1, OPTAB_LIB_WIDEN);
7205 if (result != target)
7206 convert_move (target, result, 1);
7208 /* Output the code to handle the out-of-range case. */
7209 emit_jump (op0);
7210 emit_label (op1);
7211 emit_move_insn (target, const0_rtx);
7212 emit_label (op0);
7213 return target;
7216 case WITH_CLEANUP_EXPR:
7217 if (RTL_EXPR_RTL (exp) == 0)
7219 RTL_EXPR_RTL (exp)
7220 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7221 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7223 /* That's it for this cleanup. */
7224 TREE_OPERAND (exp, 2) = 0;
7226 return RTL_EXPR_RTL (exp);
7228 case CLEANUP_POINT_EXPR:
7230 /* Start a new binding layer that will keep track of all cleanup
7231 actions to be performed. */
7232 expand_start_bindings (2);
7234 target_temp_slot_level = temp_slot_level;
7236 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7237 /* If we're going to use this value, load it up now. */
7238 if (! ignore)
7239 op0 = force_not_mem (op0);
7240 preserve_temp_slots (op0);
7241 expand_end_bindings (NULL_TREE, 0, 0);
7243 return op0;
7245 case CALL_EXPR:
7246 /* Check for a built-in function. */
7247 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7248 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7249 == FUNCTION_DECL)
7250 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7252 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7253 == BUILT_IN_FRONTEND)
7254 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7255 else
7256 return expand_builtin (exp, target, subtarget, tmode, ignore);
7259 return expand_call (exp, target, ignore);
7261 case NON_LVALUE_EXPR:
7262 case NOP_EXPR:
7263 case CONVERT_EXPR:
7264 case REFERENCE_EXPR:
7265 if (TREE_OPERAND (exp, 0) == error_mark_node)
7266 return const0_rtx;
7268 if (TREE_CODE (type) == UNION_TYPE)
7270 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7272 /* If both input and output are BLKmode, this conversion
7273 isn't actually doing anything unless we need to make the
7274 alignment stricter. */
7275 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7276 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7277 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7278 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7279 modifier);
7281 if (target == 0)
7282 target = assign_temp (type, 0, 1, 1);
7284 if (GET_CODE (target) == MEM)
7285 /* Store data into beginning of memory target. */
7286 store_expr (TREE_OPERAND (exp, 0),
7287 change_address (target, TYPE_MODE (valtype), 0), 0);
7289 else if (GET_CODE (target) == REG)
7290 /* Store this field into a union of the proper type. */
7291 store_field (target,
7292 MIN ((int_size_in_bytes (TREE_TYPE
7293 (TREE_OPERAND (exp, 0)))
7294 * BITS_PER_UNIT),
7295 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7296 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7297 VOIDmode, 0, BITS_PER_UNIT,
7298 int_size_in_bytes (type), 0);
7299 else
7300 abort ();
7302 /* Return the entire union. */
7303 return target;
7306 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7308 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7309 ro_modifier);
7311 /* If the signedness of the conversion differs and OP0 is
7312 a promoted SUBREG, clear that indication since we now
7313 have to do the proper extension. */
7314 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7315 && GET_CODE (op0) == SUBREG)
7316 SUBREG_PROMOTED_VAR_P (op0) = 0;
7318 return op0;
7321 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7322 if (GET_MODE (op0) == mode)
7323 return op0;
7325 /* If OP0 is a constant, just convert it into the proper mode. */
7326 if (CONSTANT_P (op0))
7327 return
7328 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7329 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7331 if (modifier == EXPAND_INITIALIZER)
7332 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7334 if (target == 0)
7335 return
7336 convert_to_mode (mode, op0,
7337 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7338 else
7339 convert_move (target, op0,
7340 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7341 return target;
7343 case PLUS_EXPR:
7344 /* We come here from MINUS_EXPR when the second operand is a
7345 constant. */
7346 plus_expr:
7347 this_optab = ! unsignedp && flag_trapv
7348 && (GET_MODE_CLASS(mode) == MODE_INT)
7349 ? addv_optab : add_optab;
7351 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7352 something else, make sure we add the register to the constant and
7353 then to the other thing. This case can occur during strength
7354 reduction and doing it this way will produce better code if the
7355 frame pointer or argument pointer is eliminated.
7357 fold-const.c will ensure that the constant is always in the inner
7358 PLUS_EXPR, so the only case we need to do anything about is if
7359 sp, ap, or fp is our second argument, in which case we must swap
7360 the innermost first argument and our second argument. */
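/* For instance, if the tree is (PLUS (PLUS x 4) fp), the swap below
   turns it into (PLUS (PLUS fp 4) x), so that when the frame pointer
   is later eliminated to sp+OFFSET the constants fold into a single
   displacement instead of a separate add.  */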
7362 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7363 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7364 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7365 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7366 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7367 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7369 tree t = TREE_OPERAND (exp, 1);
7371 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7372 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7375 /* If the result is to be ptr_mode and we are adding an integer to
7376 something, we might be forming a constant. So try to use
7377 plus_constant. If it produces a sum and we can't accept it,
7378 use force_operand. This allows P = &ARR[const] to generate
7379 efficient code on machines where a SYMBOL_REF is not a valid
7380 address.
7382 If this is an EXPAND_SUM call, always return the sum. */
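/* For example, assuming 4-byte ints, expanding P = &ARR[3] this way
   lets plus_constant fold the offset so the address becomes
   (plus (symbol_ref ARR) (const_int 12)) rather than being computed
   into a register first.  */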
7383 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7384 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7386 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7387 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7388 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7390 rtx constant_part;
7392 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7393 EXPAND_SUM);
7394 /* Use immed_double_const to ensure that the constant is
7395 truncated according to the mode of OP1, then sign extended
7396 to a HOST_WIDE_INT. Using the constant directly can result
7397 in non-canonical RTL in a 64x32 cross compile. */
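/* E.g. in the 64x32 case mentioned above, a low part of 0xffffffff for
   an SImode constant must be re-canonicalized to (const_int -1); using
   TREE_INT_CST_LOW directly would leave the non-canonical
   (const_int 0xffffffff) in the RTL.  */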
7398 constant_part
7399 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7400 (HOST_WIDE_INT) 0,
7401 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7402 op1 = plus_constant (op1, INTVAL (constant_part));
7403 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7404 op1 = force_operand (op1, target);
7405 return op1;
7408 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7409 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7410 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7412 rtx constant_part;
7414 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7415 EXPAND_SUM);
7416 if (! CONSTANT_P (op0))
7418 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7419 VOIDmode, modifier);
7420 /* Don't go to both_summands if modifier
7421 says it's not right to return a PLUS. */
7422 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7423 goto binop2;
7424 goto both_summands;
7426 /* Use immed_double_const to ensure that the constant is
7427 truncated according to the mode of OP1, then sign extended
7428 to a HOST_WIDE_INT. Using the constant directly can result
7429 in non-canonical RTL in a 64x32 cross compile. */
7430 constant_part
7431 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7432 (HOST_WIDE_INT) 0,
7433 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7434 op0 = plus_constant (op0, INTVAL (constant_part));
7435 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7436 op0 = force_operand (op0, target);
7437 return op0;
7441 /* No sense saving up arithmetic to be done
7442 if it's all in the wrong mode to form part of an address.
7443 And force_operand won't know whether to sign-extend or
7444 zero-extend. */
7445 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7446 || mode != ptr_mode)
7447 goto binop;
7449 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7450 subtarget = 0;
7452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7453 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7455 both_summands:
7456 /* Make sure any term that's a sum with a constant comes last. */
7457 if (GET_CODE (op0) == PLUS
7458 && CONSTANT_P (XEXP (op0, 1)))
7460 temp = op0;
7461 op0 = op1;
7462 op1 = temp;
7464 /* If adding to a sum including a constant,
7465 associate it to put the constant outside. */
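/* For example, with OP0 == (reg A) and OP1 == (plus (reg B) (const_int 8)),
   the code below forms (plus (plus A B) (const_int 8)), keeping the
   constant on the outside where later passes can combine it.  */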
7466 if (GET_CODE (op1) == PLUS
7467 && CONSTANT_P (XEXP (op1, 1)))
7469 rtx constant_term = const0_rtx;
7471 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7472 if (temp != 0)
7473 op0 = temp;
7474 /* Ensure that MULT comes first if there is one. */
7475 else if (GET_CODE (op0) == MULT)
7476 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7477 else
7478 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7480 /* Let's also eliminate constants from op0 if possible. */
7481 op0 = eliminate_constant_term (op0, &constant_term);
7483 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7484 their sum should be a constant. Form it into OP1, since the
7485 result we want will then be OP0 + OP1. */
7487 temp = simplify_binary_operation (PLUS, mode, constant_term,
7488 XEXP (op1, 1));
7489 if (temp != 0)
7490 op1 = temp;
7491 else
7492 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7495 /* Put a constant term last and put a multiplication first. */
7496 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7497 temp = op1, op1 = op0, op0 = temp;
7499 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7500 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7502 case MINUS_EXPR:
7503 /* For initializers, we are allowed to return a MINUS of two
7504 symbolic constants. Here we handle all cases when both operands
7505 are constant. */
7506 /* Handle difference of two symbolic constants,
7507 for the sake of an initializer. */
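/* For instance, an initializer that subtracts the addresses of two
   statically allocated objects, as in &b - &a, can be emitted as
   (minus (symbol_ref b) (symbol_ref a)) and left for the assembler or
   linker to resolve, rather than being computed at run time.  */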
7508 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7509 && really_constant_p (TREE_OPERAND (exp, 0))
7510 && really_constant_p (TREE_OPERAND (exp, 1)))
7512 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7513 VOIDmode, ro_modifier);
7514 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7515 VOIDmode, ro_modifier);
7517 /* If the last operand is a CONST_INT, use plus_constant of
7518 the negated constant. Else make the MINUS. */
7519 if (GET_CODE (op1) == CONST_INT)
7520 return plus_constant (op0, - INTVAL (op1));
7521 else
7522 return gen_rtx_MINUS (mode, op0, op1);
7524 /* Convert A - const to A + (-const). */
7525 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7527 tree negated = fold (build1 (NEGATE_EXPR, type,
7528 TREE_OPERAND (exp, 1)));
7530 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7531 /* If we can't negate the constant in TYPE, leave it alone and
7532 expand_binop will negate it for us. We used to try to do it
7533 here in the signed version of TYPE, but that doesn't work
7534 on POINTER_TYPEs. */;
7535 else
7537 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7538 goto plus_expr;
7541 this_optab = ! unsignedp && flag_trapv
7542 && (GET_MODE_CLASS(mode) == MODE_INT)
7543 ? subv_optab : sub_optab;
7544 goto binop;
7546 case MULT_EXPR:
7547 /* If first operand is constant, swap them.
7548 Thus the following special case checks need only
7549 check the second operand. */
7550 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7552 register tree t1 = TREE_OPERAND (exp, 0);
7553 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7554 TREE_OPERAND (exp, 1) = t1;
7557 /* Attempt to return something suitable for generating an
7558 indexed address, for machines that support that. */
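/* For example, under EXPAND_SUM a multiply like (x + 4) * 8 can come
   back as (plus (mult (reg x) (const_int 8)) (const_int 32)) through
   the distributive-law rewrite below, a form that callers can fold
   straight into an indexed address.  */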
7560 if (modifier == EXPAND_SUM && mode == ptr_mode
7561 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7562 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7564 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7565 EXPAND_SUM);
7567 /* Apply distributive law if OP0 is x+c. */
7568 if (GET_CODE (op0) == PLUS
7569 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7570 return
7571 gen_rtx_PLUS
7572 (mode,
7573 gen_rtx_MULT
7574 (mode, XEXP (op0, 0),
7575 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7576 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7577 * INTVAL (XEXP (op0, 1))));
7579 if (GET_CODE (op0) != REG)
7580 op0 = force_operand (op0, NULL_RTX);
7581 if (GET_CODE (op0) != REG)
7582 op0 = copy_to_mode_reg (mode, op0);
7584 return
7585 gen_rtx_MULT (mode, op0,
7586 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7589 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7590 subtarget = 0;
7592 /* Check for multiplying things that have been extended
7593 from a narrower type. If this machine supports multiplying
7594 in that narrower type with a result in the desired type,
7595 do it that way, and avoid the explicit type-conversion. */
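/* For instance, (int) s0 * (int) s1 with short operands can use a
   HImode-to-SImode widening multiply (a mulhisi-style pattern) when
   the target provides one, instead of extending both operands and
   doing a full SImode multiply.  */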
7596 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7597 && TREE_CODE (type) == INTEGER_TYPE
7598 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7599 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7600 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7601 && int_fits_type_p (TREE_OPERAND (exp, 1),
7602 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7603 /* Don't use a widening multiply if a shift will do. */
7604 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7605 > HOST_BITS_PER_WIDE_INT)
7606 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7607 ||
7608 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7609 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7610 ==
7611 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7612 /* If both operands are extended, they must either both
7613 be zero-extended or both be sign-extended. */
7614 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7615 ==
7616 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7618 enum machine_mode innermode
7619 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7620 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7621 ? smul_widen_optab : umul_widen_optab);
7622 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7623 ? umul_widen_optab : smul_widen_optab);
7624 if (mode == GET_MODE_WIDER_MODE (innermode))
7626 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7628 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7629 NULL_RTX, VOIDmode, 0);
7630 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7631 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7632 VOIDmode, 0);
7633 else
7634 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7635 NULL_RTX, VOIDmode, 0);
7636 goto binop2;
7638 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7639 && innermode == word_mode)
7641 rtx htem;
7642 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7643 NULL_RTX, VOIDmode, 0);
7644 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7645 op1 = convert_modes (innermode, mode,
7646 expand_expr (TREE_OPERAND (exp, 1),
7647 NULL_RTX, VOIDmode, 0),
7648 unsignedp);
7649 else
7650 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7651 NULL_RTX, VOIDmode, 0);
7652 temp = expand_binop (mode, other_optab, op0, op1, target,
7653 unsignedp, OPTAB_LIB_WIDEN);
7654 htem = expand_mult_highpart_adjust (innermode,
7655 gen_highpart (innermode, temp),
7656 op0, op1,
7657 gen_highpart (innermode, temp),
7658 unsignedp);
7659 emit_move_insn (gen_highpart (innermode, temp), htem);
7660 return temp;
7664 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7665 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7666 return expand_mult (mode, op0, op1, target, unsignedp);
7668 case TRUNC_DIV_EXPR:
7669 case FLOOR_DIV_EXPR:
7670 case CEIL_DIV_EXPR:
7671 case ROUND_DIV_EXPR:
7672 case EXACT_DIV_EXPR:
7673 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7674 subtarget = 0;
7675 /* Possible optimization: compute the dividend with EXPAND_SUM;
7676 then, if the divisor is constant, we can optimize the case
7677 where some terms of the dividend have coefficients divisible by it. */
7678 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7679 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7680 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7682 case RDIV_EXPR:
7683 this_optab = flodiv_optab;
7684 goto binop;
7686 case TRUNC_MOD_EXPR:
7687 case FLOOR_MOD_EXPR:
7688 case CEIL_MOD_EXPR:
7689 case ROUND_MOD_EXPR:
7690 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7691 subtarget = 0;
7692 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7693 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7694 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7696 case FIX_ROUND_EXPR:
7697 case FIX_FLOOR_EXPR:
7698 case FIX_CEIL_EXPR:
7699 abort (); /* Not used for C. */
7701 case FIX_TRUNC_EXPR:
7702 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7703 if (target == 0)
7704 target = gen_reg_rtx (mode);
7705 expand_fix (target, op0, unsignedp);
7706 return target;
7708 case FLOAT_EXPR:
7709 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7710 if (target == 0)
7711 target = gen_reg_rtx (mode);
7712 /* expand_float can't figure out what to do if FROM has VOIDmode.
7713 So give it the correct mode. With -O, cse will optimize this. */
7714 if (GET_MODE (op0) == VOIDmode)
7715 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7716 op0);
7717 expand_float (target, op0,
7718 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7719 return target;
7721 case NEGATE_EXPR:
7722 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7723 temp = expand_unop (mode,
7724 ! unsignedp && flag_trapv
7725 && (GET_MODE_CLASS(mode) == MODE_INT)
7726 ? negv_optab : neg_optab, op0, target, 0);
7727 if (temp == 0)
7728 abort ();
7729 return temp;
7731 case ABS_EXPR:
7732 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7734 /* Handle complex values specially. */
7735 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7736 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7737 return expand_complex_abs (mode, op0, target, unsignedp);
7739 /* Unsigned abs is simply the operand. Testing here means we don't
7740 risk generating incorrect code below. */
7741 if (TREE_UNSIGNED (type))
7742 return op0;
7744 return expand_abs (mode, op0, target, unsignedp,
7745 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7747 case MAX_EXPR:
7748 case MIN_EXPR:
7749 target = original_target;
7750 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7751 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7752 || GET_MODE (target) != mode
7753 || (GET_CODE (target) == REG
7754 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7755 target = gen_reg_rtx (mode);
7756 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7757 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7759 /* First try to do it with a special MIN or MAX instruction.
7760 If that does not win, use a conditional jump to select the proper
7761 value. */
7762 this_optab = (TREE_UNSIGNED (type)
7763 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7764 : (code == MIN_EXPR ? smin_optab : smax_optab));
7766 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7767 OPTAB_WIDEN);
7768 if (temp != 0)
7769 return temp;
7771 /* At this point, a MEM target is no longer useful; we will get better
7772 code without it. */
7774 if (GET_CODE (target) == MEM)
7775 target = gen_reg_rtx (mode);
7777 if (target != op0)
7778 emit_move_insn (target, op0);
7780 op0 = gen_label_rtx ();
7782 /* If this mode is an integer too wide to compare properly,
7783 compare word by word. Rely on cse to optimize constant cases. */
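/* E.g. a DImode MAX_EXPR on a 32-bit target with no DImode compare is
   handled here by comparing the high words first and then, on
   equality, the low words, branching around the move of OP1 into
   TARGET.  */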
7784 if (GET_MODE_CLASS (mode) == MODE_INT
7785 && ! can_compare_p (GE, mode, ccp_jump))
7787 if (code == MAX_EXPR)
7788 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7789 target, op1, NULL_RTX, op0);
7790 else
7791 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7792 op1, target, NULL_RTX, op0);
7794 else
7796 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7797 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7798 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7799 op0);
7801 emit_move_insn (target, op1);
7802 emit_label (op0);
7803 return target;
7805 case BIT_NOT_EXPR:
7806 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7807 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7808 if (temp == 0)
7809 abort ();
7810 return temp;
7812 case FFS_EXPR:
7813 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7814 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7815 if (temp == 0)
7816 abort ();
7817 return temp;
7819 /* ??? Can optimize bitwise operations with one arg constant.
7820 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7821 and (a bitwise1 b) bitwise2 b (etc)
7822 but that is probably not worthwhile. */
7824 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7825 boolean values when we want in all cases to compute both of them. In
7826 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7827 as actual zero-or-1 values and then bitwise anding. In cases where
7828 there cannot be any side effects, better code would be made by
7829 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7830 how to recognize those cases. */
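/* So (a > 0) && (b > 0) expressed as a TRUTH_AND_EXPR is expanded by
   materializing each comparison as a 0-or-1 value and AND-ing them,
   whereas the TRUTH_ANDIF_EXPR form would branch around the evaluation
   of the second comparison.  */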
7832 case TRUTH_AND_EXPR:
7833 case BIT_AND_EXPR:
7834 this_optab = and_optab;
7835 goto binop;
7837 case TRUTH_OR_EXPR:
7838 case BIT_IOR_EXPR:
7839 this_optab = ior_optab;
7840 goto binop;
7842 case TRUTH_XOR_EXPR:
7843 case BIT_XOR_EXPR:
7844 this_optab = xor_optab;
7845 goto binop;
7847 case LSHIFT_EXPR:
7848 case RSHIFT_EXPR:
7849 case LROTATE_EXPR:
7850 case RROTATE_EXPR:
7851 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7852 subtarget = 0;
7853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7854 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7855 unsignedp);
7857 /* Could determine the answer when only additive constants differ. Also,
7858 the addition of one can be handled by changing the condition. */
7859 case LT_EXPR:
7860 case LE_EXPR:
7861 case GT_EXPR:
7862 case GE_EXPR:
7863 case EQ_EXPR:
7864 case NE_EXPR:
7865 case UNORDERED_EXPR:
7866 case ORDERED_EXPR:
7867 case UNLT_EXPR:
7868 case UNLE_EXPR:
7869 case UNGT_EXPR:
7870 case UNGE_EXPR:
7871 case UNEQ_EXPR:
7872 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7873 if (temp != 0)
7874 return temp;
7876 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7877 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7878 && original_target
7879 && GET_CODE (original_target) == REG
7880 && (GET_MODE (original_target)
7881 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7883 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7884 VOIDmode, 0);
7886 if (temp != original_target)
7887 temp = copy_to_reg (temp);
7889 op1 = gen_label_rtx ();
7890 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7891 GET_MODE (temp), unsignedp, 0, op1);
7892 emit_move_insn (temp, const1_rtx);
7893 emit_label (op1);
7894 return temp;
7897 /* If no set-flag instruction, must generate a conditional
7898 store into a temporary variable. Drop through
7899 and handle this like && and ||. */
7901 case TRUTH_ANDIF_EXPR:
7902 case TRUTH_ORIF_EXPR:
7903 if (! ignore
7904 && (target == 0 || ! safe_from_p (target, exp, 1)
7905 /* Make sure we don't have a hard reg (such as function's return
7906 value) live across basic blocks, if not optimizing. */
7907 || (!optimize && GET_CODE (target) == REG
7908 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7909 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7911 if (target)
7912 emit_clr_insn (target);
7914 op1 = gen_label_rtx ();
7915 jumpifnot (exp, op1);
7917 if (target)
7918 emit_0_to_1_insn (target);
7920 emit_label (op1);
7921 return ignore ? const0_rtx : target;
7923 case TRUTH_NOT_EXPR:
7924 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7925 /* The parser is careful to generate TRUTH_NOT_EXPR
7926 only with operands that are always zero or one. */
7927 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7928 target, 1, OPTAB_LIB_WIDEN);
7929 if (temp == 0)
7930 abort ();
7931 return temp;
7933 case COMPOUND_EXPR:
7934 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7935 emit_queue ();
7936 return expand_expr (TREE_OPERAND (exp, 1),
7937 (ignore ? const0_rtx : target),
7938 VOIDmode, 0);
7940 case COND_EXPR:
7941 /* If we would have a "singleton" (see below) were it not for a
7942 conversion in each arm, bring that conversion back out. */
7943 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7944 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7945 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7946 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7948 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7949 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7951 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7952 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7953 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7954 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7955 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7956 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7957 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7958 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7959 return expand_expr (build1 (NOP_EXPR, type,
7960 build (COND_EXPR, TREE_TYPE (true),
7961 TREE_OPERAND (exp, 0),
7962 true, false)),
7963 target, tmode, modifier);
7967 /* Note that COND_EXPRs whose type is a structure or union
7968 are required to be constructed to contain assignments of
7969 a temporary variable, so that we can evaluate them here
7970 for side effect only. If type is void, we must do likewise. */
7972 /* If an arm of the branch requires a cleanup,
7973 only that cleanup is performed. */
7975 tree singleton = 0;
7976 tree binary_op = 0, unary_op = 0;
7978 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7979 convert it to our mode, if necessary. */
7980 if (integer_onep (TREE_OPERAND (exp, 1))
7981 && integer_zerop (TREE_OPERAND (exp, 2))
7982 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7984 if (ignore)
7986 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7987 ro_modifier);
7988 return const0_rtx;
7991 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7992 if (GET_MODE (op0) == mode)
7993 return op0;
7995 if (target == 0)
7996 target = gen_reg_rtx (mode);
7997 convert_move (target, op0, unsignedp);
7998 return target;
8001 /* Check for X ? A + B : A. If we have this, we can copy A to the
8002 output and conditionally add B. Similarly for unary operations.
8003 Don't do this if X has side-effects because those side effects
8004 might affect A or B and the "?" operation is a sequence point in
8005 ANSI. (operand_equal_p tests for side effects.) */
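/* For example, for x ? a + b : a the expansion below copies A into the
   result, tests X, and conditionally adds B, rather than evaluating A
   along both arms of the conditional.  */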
8007 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8008 && operand_equal_p (TREE_OPERAND (exp, 2),
8009 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8010 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8011 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8012 && operand_equal_p (TREE_OPERAND (exp, 1),
8013 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8014 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8015 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8016 && operand_equal_p (TREE_OPERAND (exp, 2),
8017 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8018 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8019 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8020 && operand_equal_p (TREE_OPERAND (exp, 1),
8021 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8022 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8024 /* If we are not to produce a result, we have no target. Otherwise,
8025 if a target was specified use it; it will not be used as an
8026 intermediate target unless it is safe. If no target, use a
8027 temporary. */
8029 if (ignore)
8030 temp = 0;
8031 else if (original_target
8032 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8033 || (singleton && GET_CODE (original_target) == REG
8034 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8035 && original_target == var_rtx (singleton)))
8036 && GET_MODE (original_target) == mode
8037 #ifdef HAVE_conditional_move
8038 && (! can_conditionally_move_p (mode)
8039 || GET_CODE (original_target) == REG
8040 || TREE_ADDRESSABLE (type))
8041 #endif
8042 && ! (GET_CODE (original_target) == MEM
8043 && MEM_VOLATILE_P (original_target)))
8044 temp = original_target;
8045 else if (TREE_ADDRESSABLE (type))
8046 abort ();
8047 else
8048 temp = assign_temp (type, 0, 0, 1);
8050 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8051 do the test of X as a store-flag operation, do this as
8052 A + ((X != 0) << log C). Similarly for other simple binary
8053 operators. Only do for C == 1 if BRANCH_COST is low. */
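/* E.g. x ? a + 4 : a can become a + ((x != 0) << 2), replacing the
   branch with a store-flag and a shift; with C == 1 the shift
   disappears entirely, which is why that case is still allowed when
   BRANCH_COST is low.  */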
8054 if (temp && singleton && binary_op
8055 && (TREE_CODE (binary_op) == PLUS_EXPR
8056 || TREE_CODE (binary_op) == MINUS_EXPR
8057 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8058 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8059 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8060 : integer_onep (TREE_OPERAND (binary_op, 1)))
8061 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8063 rtx result;
8064 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8065 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8066 ? addv_optab : add_optab)
8067 : TREE_CODE (binary_op) == MINUS_EXPR
8068 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8069 ? subv_optab : sub_optab)
8070 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8071 : xor_optab);
8073 /* If we had X ? A : A + 1, do this as A + (X == 0).
8075 We have to invert the truth value here and then put it
8076 back later if do_store_flag fails. We cannot simply copy
8077 TREE_OPERAND (exp, 0) to another variable and modify that
8078 because invert_truthvalue can modify the tree pointed to
8079 by its argument. */
8080 if (singleton == TREE_OPERAND (exp, 1))
8081 TREE_OPERAND (exp, 0)
8082 = invert_truthvalue (TREE_OPERAND (exp, 0));
8084 result = do_store_flag (TREE_OPERAND (exp, 0),
8085 (safe_from_p (temp, singleton, 1)
8086 ? temp : NULL_RTX),
8087 mode, BRANCH_COST <= 1);
8089 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8090 result = expand_shift (LSHIFT_EXPR, mode, result,
8091 build_int_2 (tree_log2
8092 (TREE_OPERAND
8093 (binary_op, 1)),
8094 0),
8095 (safe_from_p (temp, singleton, 1)
8096 ? temp : NULL_RTX), 0);
8098 if (result)
8100 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8101 return expand_binop (mode, boptab, op1, result, temp,
8102 unsignedp, OPTAB_LIB_WIDEN);
8104 else if (singleton == TREE_OPERAND (exp, 1))
8105 TREE_OPERAND (exp, 0)
8106 = invert_truthvalue (TREE_OPERAND (exp, 0));
8109 do_pending_stack_adjust ();
8110 NO_DEFER_POP;
8111 op0 = gen_label_rtx ();
8113 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8115 if (temp != 0)
8117 /* If the target conflicts with the other operand of the
8118 binary op, we can't use it. Also, we can't use the target
8119 if it is a hard register, because evaluating the condition
8120 might clobber it. */
8121 if ((binary_op
8122 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8123 || (GET_CODE (temp) == REG
8124 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8125 temp = gen_reg_rtx (mode);
8126 store_expr (singleton, temp, 0);
8128 else
8129 expand_expr (singleton,
8130 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8131 if (singleton == TREE_OPERAND (exp, 1))
8132 jumpif (TREE_OPERAND (exp, 0), op0);
8133 else
8134 jumpifnot (TREE_OPERAND (exp, 0), op0);
8136 start_cleanup_deferral ();
8137 if (binary_op && temp == 0)
8138 /* Just touch the other operand. */
8139 expand_expr (TREE_OPERAND (binary_op, 1),
8140 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8141 else if (binary_op)
8142 store_expr (build (TREE_CODE (binary_op), type,
8143 make_tree (type, temp),
8144 TREE_OPERAND (binary_op, 1)),
8145 temp, 0);
8146 else
8147 store_expr (build1 (TREE_CODE (unary_op), type,
8148 make_tree (type, temp)),
8149 temp, 0);
8150 op1 = op0;
8152 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8153 comparison operator. If we have one of these cases, set the
8154 output to A, branch on A (cse will merge these two references),
8155 then set the output to FOO. */
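/* For instance, p != 0 ? p : q stores P into the result, branches on
   the same P (so cse can merge the load and the test), and only falls
   through to store Q when P is zero.  */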
8156 else if (temp
8157 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8158 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8159 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8160 TREE_OPERAND (exp, 1), 0)
8161 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8162 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8163 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8165 if (GET_CODE (temp) == REG
8166 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8167 temp = gen_reg_rtx (mode);
8168 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8169 jumpif (TREE_OPERAND (exp, 0), op0);
8171 start_cleanup_deferral ();
8172 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8173 op1 = op0;
8175 else if (temp
8176 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8177 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8178 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8179 TREE_OPERAND (exp, 2), 0)
8180 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8181 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8182 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8184 if (GET_CODE (temp) == REG
8185 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8186 temp = gen_reg_rtx (mode);
8187 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8188 jumpifnot (TREE_OPERAND (exp, 0), op0);
8190 start_cleanup_deferral ();
8191 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8192 op1 = op0;
8194 else
8196 op1 = gen_label_rtx ();
8197 jumpifnot (TREE_OPERAND (exp, 0), op0);
8199 start_cleanup_deferral ();
8201 /* One branch of the cond can be void, if it never returns. For
8202 example A ? throw : E */
8203 if (temp != 0
8204 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8205 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8206 else
8207 expand_expr (TREE_OPERAND (exp, 1),
8208 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8209 end_cleanup_deferral ();
8210 emit_queue ();
8211 emit_jump_insn (gen_jump (op1));
8212 emit_barrier ();
8213 emit_label (op0);
8214 start_cleanup_deferral ();
8215 if (temp != 0
8216 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8217 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8218 else
8219 expand_expr (TREE_OPERAND (exp, 2),
8220 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8223 end_cleanup_deferral ();
8225 emit_queue ();
8226 emit_label (op1);
8227 OK_DEFER_POP;
8229 return temp;
8232 case TARGET_EXPR:
8234 /* Something needs to be initialized, but we didn't know
8235 where that thing was when building the tree. For example,
8236 it could be the return value of a function, or a parameter
8237 to a function which is laid down on the stack, or a temporary
8238 variable which must be passed by reference.
8240 We guarantee that the expression will either be constructed
8241 or copied into our original target. */
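/* A typical client is the C++ front end: for `S s = f ();' where S
   must be returned in memory, the initializer is wrapped in a
   TARGET_EXPR so that f's result can be constructed directly in the
   storage for s instead of in a temporary that is then copied.  */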
8243 tree slot = TREE_OPERAND (exp, 0);
8244 tree cleanups = NULL_TREE;
8245 tree exp1;
8247 if (TREE_CODE (slot) != VAR_DECL)
8248 abort ();
8250 if (! ignore)
8251 target = original_target;
8253 /* Set this here so that if we get a target that refers to a
8254 register variable that's already been used, put_reg_into_stack
8255 knows that it should fix up those uses. */
8256 TREE_USED (slot) = 1;
8258 if (target == 0)
8260 if (DECL_RTL (slot) != 0)
8262 target = DECL_RTL (slot);
8263 /* If we have already expanded the slot, don't do
8264 it again. (mrs) */
8265 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8266 return target;
8268 else
8270 target = assign_temp (type, 2, 0, 1);
8271 /* All temp slots at this level must not conflict. */
8272 preserve_temp_slots (target);
8273 DECL_RTL (slot) = target;
8274 if (TREE_ADDRESSABLE (slot))
8275 put_var_into_stack (slot);
8277 /* Since SLOT is not known to the called function
8278 to belong to its stack frame, we must build an explicit
8279 cleanup. This case occurs when we must build up a reference
8280 to pass the reference as an argument. In this case,
8281 it is very likely that such a reference need not be
8282 built here. */
8284 if (TREE_OPERAND (exp, 2) == 0)
8285 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8286 cleanups = TREE_OPERAND (exp, 2);
8289 else
8291 /* This case does occur, when expanding a parameter which
8292 needs to be constructed on the stack. The target
8293 is the actual stack address that we want to initialize.
8294 The function we call will perform the cleanup in this case. */
8296 /* If we have already assigned it space, use that space,
8297 not the target that we were passed in, as our target
8298 parameter is only a hint. */
8299 if (DECL_RTL (slot) != 0)
8301 target = DECL_RTL (slot);
8302 /* If we have already expanded the slot, don't do
8303 it again. (mrs) */
8304 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8305 return target;
8307 else
8309 DECL_RTL (slot) = target;
8310 /* If we must have an addressable slot, then make sure that
8311 the RTL that we just stored in slot is OK. */
8312 if (TREE_ADDRESSABLE (slot))
8313 put_var_into_stack (slot);
8317 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8318 /* Mark it as expanded. */
8319 TREE_OPERAND (exp, 1) = NULL_TREE;
8321 store_expr (exp1, target, 0);
8323 expand_decl_cleanup (NULL_TREE, cleanups);
8325 return target;
8328 case INIT_EXPR:
8330 tree lhs = TREE_OPERAND (exp, 0);
8331 tree rhs = TREE_OPERAND (exp, 1);
8332 tree noncopied_parts = 0;
8333 tree lhs_type = TREE_TYPE (lhs);
8335 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8336 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8337 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8338 TYPE_NONCOPIED_PARTS (lhs_type));
8339 while (noncopied_parts != 0)
8341 expand_assignment (TREE_VALUE (noncopied_parts),
8342 TREE_PURPOSE (noncopied_parts), 0, 0);
8343 noncopied_parts = TREE_CHAIN (noncopied_parts);
8345 return temp;
8348 case MODIFY_EXPR:
8350 /* If lhs is complex, expand calls in rhs before computing it.
8351 That's so we don't compute a pointer and save it over a call.
8352 If lhs is simple, compute it first so we can give it as a
8353 target if the rhs is just a call. This avoids an extra temp and copy
8354 and that prevents a partial-subsumption which makes bad code.
8355 Actually we could treat component_ref's of vars like vars. */
8357 tree lhs = TREE_OPERAND (exp, 0);
8358 tree rhs = TREE_OPERAND (exp, 1);
8359 tree noncopied_parts = 0;
8360 tree lhs_type = TREE_TYPE (lhs);
8362 temp = 0;
8364 if (TREE_CODE (lhs) != VAR_DECL
8365 && TREE_CODE (lhs) != RESULT_DECL
8366 && TREE_CODE (lhs) != PARM_DECL
8367 && ! (TREE_CODE (lhs) == INDIRECT_REF
8368 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8370 /* Check for |= or &= of a bitfield of size one into another bitfield
8371 of size 1. In this case, (unless we need the result of the
8372 assignment) we can do this more efficiently with a
8373 test followed by an assignment, if necessary.
8375 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8376 things change so we do, this code should be enhanced to
8377 support it. */
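/* Illustration: with `struct { unsigned a : 1, b : 1; } x, y;' the
   statement x.a |= y.b (result unused) becomes a test of y.b and a
   conditional store of 1 into x.a, avoiding a read-modify-write of
   the containing word.  */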
8378 if (ignore
8379 && TREE_CODE (lhs) == COMPONENT_REF
8380 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8381 || TREE_CODE (rhs) == BIT_AND_EXPR)
8382 && TREE_OPERAND (rhs, 0) == lhs
8383 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8384 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8385 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8387 rtx label = gen_label_rtx ();
8389 do_jump (TREE_OPERAND (rhs, 1),
8390 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8391 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8392 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8393 (TREE_CODE (rhs) == BIT_IOR_EXPR
8394 ? integer_one_node
8395 : integer_zero_node)),
8396 0, 0);
8397 do_pending_stack_adjust ();
8398 emit_label (label);
8399 return const0_rtx;
8402 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8403 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8404 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8405 TYPE_NONCOPIED_PARTS (lhs_type));
8407 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8408 while (noncopied_parts != 0)
8410 expand_assignment (TREE_PURPOSE (noncopied_parts),
8411 TREE_VALUE (noncopied_parts), 0, 0);
8412 noncopied_parts = TREE_CHAIN (noncopied_parts);
8414 return temp;
8417 case RETURN_EXPR:
8418 if (!TREE_OPERAND (exp, 0))
8419 expand_null_return ();
8420 else
8421 expand_return (TREE_OPERAND (exp, 0));
8422 return const0_rtx;
8424 case PREINCREMENT_EXPR:
8425 case PREDECREMENT_EXPR:
8426 return expand_increment (exp, 0, ignore);
8428 case POSTINCREMENT_EXPR:
8429 case POSTDECREMENT_EXPR:
8430 /* Faster to treat as pre-increment if result is not used. */
8431 return expand_increment (exp, ! ignore, ignore);
8433 case ADDR_EXPR:
8434 /* If nonzero, TEMP will be set to the address of something that might
8435 be a MEM corresponding to a stack slot. */
8436 temp = 0;
8438 /* Are we taking the address of a nested function? */
8439 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8440 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8441 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8442 && ! TREE_STATIC (exp))
8444 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8445 op0 = force_operand (op0, target);
8447 /* If we are taking the address of something erroneous, just
8448 return a zero. */
8449 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8450 return const0_rtx;
8451 else
8453 /* We make sure to pass const0_rtx down if we came in with
8454 ignore set, to avoid doing the cleanups twice for something. */
8455 op0 = expand_expr (TREE_OPERAND (exp, 0),
8456 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8457 (modifier == EXPAND_INITIALIZER
8458 ? modifier : EXPAND_CONST_ADDRESS));
8460 /* If we are going to ignore the result, OP0 will have been set
8461 to const0_rtx, so just return it. Don't get confused and
8462 think we are taking the address of the constant. */
8463 if (ignore)
8464 return op0;
8466 op0 = protect_from_queue (op0, 0);
8468 /* We would like the object in memory. If it is a constant, we can
8469 have it be statically allocated into memory. For a non-constant,
8470 we need to allocate some memory and store the value into it. */
8472 if (CONSTANT_P (op0))
8473 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8474 op0);
8475 else if (GET_CODE (op0) == MEM)
8477 mark_temp_addr_taken (op0);
8478 temp = XEXP (op0, 0);
8481 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8482 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8483 || GET_CODE (op0) == PARALLEL)
8485 /* If this object is in a register, it must not
8486 be BLKmode. */
8487 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8488 tree nt = build_qualified_type (inner_type,
8489 (TYPE_QUALS (inner_type)
8490 | TYPE_QUAL_CONST));
8491 rtx memloc = assign_temp (nt, 1, 1, 1);
8493 mark_temp_addr_taken (memloc);
8494 if (GET_CODE (op0) == PARALLEL)
8495 /* Handle calls that pass values in multiple non-contiguous
8496 locations. The Irix 6 ABI has examples of this. */
8497 emit_group_store (memloc, op0,
8498 int_size_in_bytes (inner_type),
8499 TYPE_ALIGN (inner_type));
8500 else
8501 emit_move_insn (memloc, op0);
8502 op0 = memloc;
8505 if (GET_CODE (op0) != MEM)
8506 abort ();
8508 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8510 temp = XEXP (op0, 0);
8511 #ifdef POINTERS_EXTEND_UNSIGNED
8512 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8513 && mode == ptr_mode)
8514 temp = convert_memory_address (ptr_mode, temp);
8515 #endif
8516 return temp;
8519 op0 = force_operand (XEXP (op0, 0), target);
8522 if (flag_force_addr && GET_CODE (op0) != REG)
8523 op0 = force_reg (Pmode, op0);
8525 if (GET_CODE (op0) == REG
8526 && ! REG_USERVAR_P (op0))
8527 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8529 /* If we might have had a temp slot, add an equivalent address
8530 for it. */
8531 if (temp != 0)
8532 update_temp_slot_address (temp, op0);
8534 #ifdef POINTERS_EXTEND_UNSIGNED
8535 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8536 && mode == ptr_mode)
8537 op0 = convert_memory_address (ptr_mode, op0);
8538 #endif
8540 return op0;
8542 case ENTRY_VALUE_EXPR:
8543 abort ();
8545 /* COMPLEX type for Extended Pascal & Fortran */
8546 case COMPLEX_EXPR:
8548 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8549 rtx insns;
8551 /* Get the rtx code of the operands. */
8552 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8553 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8555 if (! target)
8556 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8558 start_sequence ();
8560 /* Move the real (op0) and imaginary (op1) parts to their location. */
8561 emit_move_insn (gen_realpart (mode, target), op0);
8562 emit_move_insn (gen_imagpart (mode, target), op1);
8564 insns = get_insns ();
8565 end_sequence ();
8567 /* Complex construction should appear as a single unit. */
8568 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8569 each with a separate pseudo as destination.
8570 It's not correct for flow to treat them as a unit. */
8571 if (GET_CODE (target) != CONCAT)
8572 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8573 else
8574 emit_insns (insns);
8576 return target;
8579 case REALPART_EXPR:
8580 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8581 return gen_realpart (mode, op0);
8583 case IMAGPART_EXPR:
8584 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8585 return gen_imagpart (mode, op0);
8587 case CONJ_EXPR:
8589 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8590 rtx imag_t;
8591 rtx insns;
8593 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8595 if (! target)
8596 target = gen_reg_rtx (mode);
8598 start_sequence ();
8600 /* Store the realpart and the negated imagpart to target. */
8601 emit_move_insn (gen_realpart (partmode, target),
8602 gen_realpart (partmode, op0));
8604 imag_t = gen_imagpart (partmode, target);
8605 temp = expand_unop (partmode,
8606 ! unsignedp && flag_trapv
8607 && (GET_MODE_CLASS(partmode) == MODE_INT)
8608 ? negv_optab : neg_optab,
8609 gen_imagpart (partmode, op0), imag_t, 0);
8610 if (temp != imag_t)
8611 emit_move_insn (imag_t, temp);
8613 insns = get_insns ();
8614 end_sequence ();
8616 /* Conjugate should appear as a single unit.
8617 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8618 each with a separate pseudo as destination.
8619 It's not correct for flow to treat them as a unit. */
8620 if (GET_CODE (target) != CONCAT)
8621 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8622 else
8623 emit_insns (insns);
8625 return target;
8628 case TRY_CATCH_EXPR:
8630 tree handler = TREE_OPERAND (exp, 1);
8632 expand_eh_region_start ();
8634 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8636 expand_eh_region_end (handler);
8638 return op0;
8641 case TRY_FINALLY_EXPR:
8643 tree try_block = TREE_OPERAND (exp, 0);
8644 tree finally_block = TREE_OPERAND (exp, 1);
8645 rtx finally_label = gen_label_rtx ();
8646 rtx done_label = gen_label_rtx ();
8647 rtx return_link = gen_reg_rtx (Pmode);
8648 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8649 (tree) finally_label, (tree) return_link);
8650 TREE_SIDE_EFFECTS (cleanup) = 1;
8652 /* Start a new binding layer that will keep track of all cleanup
8653 actions to be performed. */
8654 expand_start_bindings (2);
8656 target_temp_slot_level = temp_slot_level;
8658 expand_decl_cleanup (NULL_TREE, cleanup);
8659 op0 = expand_expr (try_block, target, tmode, modifier);
8661 preserve_temp_slots (op0);
8662 expand_end_bindings (NULL_TREE, 0, 0);
8663 emit_jump (done_label);
8664 emit_label (finally_label);
8665 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8666 emit_indirect_jump (return_link);
8667 emit_label (done_label);
8668 return op0;
8671 case GOTO_SUBROUTINE_EXPR:
8673 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8674 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8675 rtx return_address = gen_label_rtx ();
8676 emit_move_insn (return_link,
8677 gen_rtx_LABEL_REF (Pmode, return_address));
8678 emit_jump (subr);
8679 emit_label (return_address);
8680 return const0_rtx;
8683 case POPDCC_EXPR:
8685 rtx dcc = get_dynamic_cleanup_chain ();
8686 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8687 return const0_rtx;
8690 case POPDHC_EXPR:
8692 rtx dhc = get_dynamic_handler_chain ();
8693 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8694 return const0_rtx;
8697 case VA_ARG_EXPR:
8698 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8700 default:
8701 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8704 /* Here to do an ordinary binary operator, generating an instruction
8705 from the optab already placed in `this_optab'. */
8706 binop:
8707 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8708 subtarget = 0;
8709 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8710 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8711 binop2:
8712 temp = expand_binop (mode, this_optab, op0, op1, target,
8713 unsignedp, OPTAB_LIB_WIDEN);
8714 if (temp == 0)
8715 abort ();
8716 return temp;
8719 /* Similar to expand_expr, except that we don't specify a target, target
8720 mode, or modifier and we return the alignment of the inner type. This is
8721 used in cases where it is not necessary to align the result to the
8722 alignment of its type as long as we know the alignment of the result, for
8723 example for comparisons of BLKmode values. */
8725 static rtx
8726 expand_expr_unaligned (exp, palign)
8727 register tree exp;
8728 unsigned int *palign;
8730 register rtx op0;
8731 tree type = TREE_TYPE (exp);
8732 register enum machine_mode mode = TYPE_MODE (type);
8734 /* Default the alignment we return to that of the type. */
8735 *palign = TYPE_ALIGN (type);
8737 /* The only case in which we do anything special is when the resulting mode
8738 is BLKmode. */
8739 if (mode != BLKmode)
8740 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8742 switch (TREE_CODE (exp))
8744 case CONVERT_EXPR:
8745 case NOP_EXPR:
8746 case NON_LVALUE_EXPR:
8747 /* Conversions between BLKmode values don't change the underlying
8748 alignment or value. */
8749 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8750 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8751 break;
8753 case ARRAY_REF:
8754 /* Much of the code for this case is copied directly from expand_expr.
8755 We need to duplicate it here because we will do something different
8756 in the fall-through case, so we need to handle the same exceptions
8757 it does. */
8759 tree array = TREE_OPERAND (exp, 0);
8760 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8761 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8762 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8763 HOST_WIDE_INT i;
8765 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8766 abort ();
8768 /* Optimize the special-case of a zero lower bound.
8770 We convert the low_bound to sizetype to avoid some problems
8771 with constant folding. (E.g. suppose the lower bound is 1,
8772 and its mode is QI. Without the conversion, (ARRAY
8773 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8774 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8776 if (! integer_zerop (low_bound))
8777 index = size_diffop (index, convert (sizetype, low_bound));
8779 /* If this is a constant index into a constant array,
8780 just get the value from the array. Handle both the cases when
8781 we have an explicit constructor and when our operand is a variable
8782 that was declared const. */
8784 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8785 && host_integerp (index, 0)
8786 && 0 > compare_tree_int (index,
8787 list_length (CONSTRUCTOR_ELTS
8788 (TREE_OPERAND (exp, 0)))))
8790 tree elem;
8792 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8793 i = tree_low_cst (index, 0);
8794 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8797 if (elem)
8798 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8801 else if (optimize >= 1
8802 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8803 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8804 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8806 if (TREE_CODE (index) == INTEGER_CST)
8808 tree init = DECL_INITIAL (array);
8810 if (TREE_CODE (init) == CONSTRUCTOR)
8812 tree elem;
8814 for (elem = CONSTRUCTOR_ELTS (init);
8815 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8816 elem = TREE_CHAIN (elem))
8819 if (elem)
8820 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8821 palign);
8826 /* Fall through. */
8828 case COMPONENT_REF:
8829 case BIT_FIELD_REF:
8830 /* If the operand is a CONSTRUCTOR, we can just extract the
8831 appropriate field if it is present. Don't do this if we have
8832 already written the data since we want to refer to that copy
8833 and varasm.c assumes that's what we'll do. */
8834 if (TREE_CODE (exp) != ARRAY_REF
8835 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8836 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8838 tree elt;
8840 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8841 elt = TREE_CHAIN (elt))
8842 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8843 /* Note that unlike the case in expand_expr, we know this is
8844 BLKmode and hence not an integer. */
8845 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8849 enum machine_mode mode1;
8850 HOST_WIDE_INT bitsize, bitpos;
8851 tree offset;
8852 int volatilep = 0;
8853 unsigned int alignment;
8854 int unsignedp;
8855 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8856 &mode1, &unsignedp, &volatilep,
8857 &alignment);
8859 /* If we got back the original object, something is wrong. Perhaps
8860 we are evaluating an expression too early. In any event, don't
8861 infinitely recurse. */
8862 if (tem == exp)
8863 abort ();
8865 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8867 /* If this is a constant, put it into a register if it is a
8868 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8869 if (CONSTANT_P (op0))
8871 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8873 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8874 && offset == 0)
8875 op0 = force_reg (inner_mode, op0);
8876 else
8877 op0 = validize_mem (force_const_mem (inner_mode, op0));
8880 if (offset != 0)
8882 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8884 /* If this object is in a register, put it into memory.
8885 This case can't occur in C, but can in Ada if we have
8886 unchecked conversion of an expression from a scalar type to
8887 an array or record type. */
8888 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8889 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8891 tree nt = build_qualified_type (TREE_TYPE (tem),
8892 (TYPE_QUALS (TREE_TYPE (tem))
8893 | TYPE_QUAL_CONST));
8894 rtx memloc = assign_temp (nt, 1, 1, 1);
8896 mark_temp_addr_taken (memloc);
8897 emit_move_insn (memloc, op0);
8898 op0 = memloc;
8901 if (GET_CODE (op0) != MEM)
8902 abort ();
8904 if (GET_MODE (offset_rtx) != ptr_mode)
8906 #ifdef POINTERS_EXTEND_UNSIGNED
8907 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8908 #else
8909 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8910 #endif
8913 op0 = change_address (op0, VOIDmode,
8914 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8915 force_reg (ptr_mode,
8916 offset_rtx)));
8919 /* Don't forget about volatility even if this is a bitfield. */
8920 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8922 op0 = copy_rtx (op0);
8923 MEM_VOLATILE_P (op0) = 1;
8926 /* Check the access. */
8927 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8929 rtx to;
8930 int size;
8932 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8933 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8935 /* Check the access right of the pointer. */
8936 in_check_memory_usage = 1;
8937 if (size > BITS_PER_UNIT)
8938 emit_library_call (chkr_check_addr_libfunc,
8939 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8940 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8941 TYPE_MODE (sizetype),
8942 GEN_INT (MEMORY_USE_RO),
8943 TYPE_MODE (integer_type_node));
8944 in_check_memory_usage = 0;
8947 /* In cases where an aligned union has an unaligned object
8948 as a field, we might be extracting a BLKmode value from
8949 an integer-mode (e.g., SImode) object. Handle this case
8950 by doing the extract into an object as wide as the field
8951 (which we know to be the width of a basic mode), then
8952 storing into memory, and changing the mode to BLKmode.
8953 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8954 EXPAND_INITIALIZER), then we must not copy to a temporary. */
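/* For instance, when a BLKmode field sits at an unaligned offset
   inside an integer-mode (say SImode) union member, the code below
   pulls the bits out with extract_bit_field, writes them to a fresh
   stack temporary, and relabels that temporary as BLKmode so callers
   see an ordinary BLKmode MEM.  */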
8955 if (mode1 == VOIDmode
8956 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8957 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8958 && (TYPE_ALIGN (type) > alignment
8959 || bitpos % TYPE_ALIGN (type) != 0)))
8961 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8963 if (ext_mode == BLKmode)
8965 /* In this case, BITPOS must start at a byte boundary. */
8966 if (GET_CODE (op0) != MEM
8967 || bitpos % BITS_PER_UNIT != 0)
8968 abort ();
8970 op0 = change_address (op0, VOIDmode,
8971 plus_constant (XEXP (op0, 0),
8972 bitpos / BITS_PER_UNIT));
8974 else
8976 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8977 TYPE_QUAL_CONST);
8978 rtx new = assign_temp (nt, 0, 1, 1);
8980 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8981 unsignedp, NULL_RTX, ext_mode,
8982 ext_mode, alignment,
8983 int_size_in_bytes (TREE_TYPE (tem)));
8985 /* If the result is a record type and BITSIZE is narrower than
8986 the mode of OP0, an integral mode, and this is a big endian
8987 machine, we must put the field into the high-order bits. */
8988 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8989 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8990 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8991 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8992 size_int (GET_MODE_BITSIZE
8993 (GET_MODE (op0))
8994 - bitsize),
8995 op0, 1);
8997 emit_move_insn (new, op0);
8998 op0 = copy_rtx (new);
8999 PUT_MODE (op0, BLKmode);
9002 else
9003 /* Get a reference to just this component. */
9004 op0 = change_address (op0, mode1,
9005 plus_constant (XEXP (op0, 0),
9006 (bitpos / BITS_PER_UNIT)));
9008 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9010 /* Adjust the alignment in case the bit position is not
9011 a multiple of the alignment of the inner object. */
9012 while (bitpos % alignment != 0)
9013 alignment >>= 1;
9015 if (GET_CODE (XEXP (op0, 0)) == REG)
9016 mark_reg_pointer (XEXP (op0, 0), alignment);
9018 MEM_IN_STRUCT_P (op0) = 1;
9019 MEM_VOLATILE_P (op0) |= volatilep;
9021 *palign = alignment;
9022 return op0;
9025 default:
9026 break;
9030 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9033 /* Return the tree node if ARG corresponds to a string constant, or zero
9034 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9035 in bytes within the string that ARG is accessing. The type of the
9036 offset will be `sizetype'. */
9038 tree
9039 string_constant (arg, ptr_offset)
9040 tree arg;
9041 tree *ptr_offset;
9043 STRIP_NOPS (arg);
9045 if (TREE_CODE (arg) == ADDR_EXPR
9046 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9048 *ptr_offset = size_zero_node;
9049 return TREE_OPERAND (arg, 0);
9051 else if (TREE_CODE (arg) == PLUS_EXPR)
9053 tree arg0 = TREE_OPERAND (arg, 0);
9054 tree arg1 = TREE_OPERAND (arg, 1);
9056 STRIP_NOPS (arg0);
9057 STRIP_NOPS (arg1);
9059 if (TREE_CODE (arg0) == ADDR_EXPR
9060 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9062 *ptr_offset = convert (sizetype, arg1);
9063 return TREE_OPERAND (arg0, 0);
9065 else if (TREE_CODE (arg1) == ADDR_EXPR
9066 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9068 *ptr_offset = convert (sizetype, arg0);
9069 return TREE_OPERAND (arg1, 0);
9073 return 0;
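/* An illustrative sketch (assuming a hypothetical caller; not a routine of
   this file): a typical user of string_constant is a builtin expander that
   wants the compile-time length of a string argument.  Everything used
   below except `c_strlen_sketch' itself is a real routine or macro.  */
#if 0
static tree
c_strlen_sketch (src)
     tree src;
{
  tree offset_node;
  tree str = string_constant (src, &offset_node);
  const char *ptr;
  HOST_WIDE_INT offset;

  if (str == 0)
    return 0;

  /* The offset has type `sizetype'; only a constant offset into the
     literal can be folded here.  */
  if (! host_integerp (offset_node, 1))
    return 0;

  ptr = TREE_STRING_POINTER (str);
  offset = tree_low_cst (offset_node, 1);
  if (offset >= TREE_STRING_LENGTH (str))
    return 0;

  /* A real caller must also check that the literal is nul-terminated
     before trusting strlen on it.  */
  return size_int (strlen (ptr + offset));
}
#endif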
9076 /* Expand code for a post- or pre- increment or decrement
9077 and return the RTX for the result.
9078 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9080 static rtx
9081 expand_increment (exp, post, ignore)
9082 register tree exp;
9083 int post, ignore;
9085 register rtx op0, op1;
9086 register rtx temp, value;
9087 register tree incremented = TREE_OPERAND (exp, 0);
9088 optab this_optab = add_optab;
9089 int icode;
9090 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9091 int op0_is_copy = 0;
9092 int single_insn = 0;
9093 /* 1 means we can't store into OP0 directly,
9094 because it is a subreg narrower than a word,
9095 and we don't dare clobber the rest of the word. */
9096 int bad_subreg = 0;
9098 /* Stabilize any component ref that might need to be
9099 evaluated more than once below. */
9100 if (!post
9101 || TREE_CODE (incremented) == BIT_FIELD_REF
9102 || (TREE_CODE (incremented) == COMPONENT_REF
9103 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9104 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9105 incremented = stabilize_reference (incremented);
9106 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9107 ones into save exprs so that they don't accidentally get evaluated
9108 more than once by the code below. */
9109 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9110 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9111 incremented = save_expr (incremented);
9113 /* Compute the operands as RTX.
9114 Note whether OP0 is the actual lvalue or a copy of it:
9115 I believe it is a copy iff it is a register or subreg
9116 and insns were generated in computing it. */
9118 temp = get_last_insn ();
9119 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9121 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9122 in place but instead must do sign- or zero-extension during assignment,
9123 so we copy it into a new register and let the code below use it as
9124 a copy.
9126 Note that we can safely modify this SUBREG since it is known not to be
9127 shared (it was made by the expand_expr call above). */
9129 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9131 if (post)
9132 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9133 else
9134 bad_subreg = 1;
9136 else if (GET_CODE (op0) == SUBREG
9137 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9139 /* We cannot increment this SUBREG in place. If we are
9140 post-incrementing, get a copy of the old value. Otherwise,
9141 just mark that we cannot increment in place. */
9142 if (post)
9143 op0 = copy_to_reg (op0);
9144 else
9145 bad_subreg = 1;
9148 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9149 && temp != get_last_insn ());
9150 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9151 EXPAND_MEMORY_USE_BAD);
9153 /* Decide whether incrementing or decrementing. */
9154 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9155 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9156 this_optab = sub_optab;
9158 /* Convert decrement by a constant into a negative increment. */
9159 if (this_optab == sub_optab
9160 && GET_CODE (op1) == CONST_INT)
9162 op1 = GEN_INT (-INTVAL (op1));
9163 this_optab = add_optab;
9166 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9167 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9169 /* For a preincrement, see if we can do this with a single instruction. */
9170 if (!post)
9172 icode = (int) this_optab->handlers[(int) mode].insn_code;
9173 if (icode != (int) CODE_FOR_nothing
9174 /* Make sure that OP0 is valid for operands 0 and 1
9175 of the insn we want to queue. */
9176 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9177 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9178 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9179 single_insn = 1;
9182 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9183 then we cannot just increment OP0. We must therefore contrive to
9184 increment the original value. Then, for postincrement, we can return
9185 OP0 since it is a copy of the old value. For preincrement, expand here
9186 unless we can do it with a single insn.
9188 Likewise if storing directly into OP0 would clobber high bits
9189 we need to preserve (bad_subreg). */
9190 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9192 /* This is the easiest way to increment the value wherever it is.
9193 Problems with multiple evaluation of INCREMENTED are prevented
9194 because either (1) it is a component_ref or preincrement,
9195 in which case it was stabilized above, or (2) it is an array_ref
9196 with constant index in an array in a register, which is
9197 safe to reevaluate. */
9198 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9199 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9200 ? MINUS_EXPR : PLUS_EXPR),
9201 TREE_TYPE (exp),
9202 incremented,
9203 TREE_OPERAND (exp, 1));
9205 while (TREE_CODE (incremented) == NOP_EXPR
9206 || TREE_CODE (incremented) == CONVERT_EXPR)
9208 newexp = convert (TREE_TYPE (incremented), newexp);
9209 incremented = TREE_OPERAND (incremented, 0);
9212 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9213 return post ? op0 : temp;
9216 if (post)
9218 /* We have a true reference to the value in OP0.
9219 If there is an insn to add or subtract in this mode, queue it.
9220 Queueing the increment insn avoids the register shuffling
9221 that often results if we must increment now and first save
9222 the old value for subsequent use. */
9224 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9225 op0 = stabilize (op0);
9226 #endif
9228 icode = (int) this_optab->handlers[(int) mode].insn_code;
9229 if (icode != (int) CODE_FOR_nothing
9230 /* Make sure that OP0 is valid for operands 0 and 1
9231 of the insn we want to queue. */
9232 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9233 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9235 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9236 op1 = force_reg (mode, op1);
9238 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9240 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9242 rtx addr = (general_operand (XEXP (op0, 0), mode)
9243 ? force_reg (Pmode, XEXP (op0, 0))
9244 : copy_to_reg (XEXP (op0, 0)));
9245 rtx temp, result;
9247 op0 = change_address (op0, VOIDmode, addr);
9248 temp = force_reg (GET_MODE (op0), op0);
9249 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9250 op1 = force_reg (mode, op1);
9252 /* The increment queue is LIFO, thus we have to `queue'
9253 the instructions in reverse order. */
9254 enqueue_insn (op0, gen_move_insn (op0, temp));
9255 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9256 return result;
9260 /* Preincrement, or we can't increment with one simple insn. */
9261 if (post)
9262 /* Save a copy of the value before inc or dec, to return it later. */
9263 temp = value = copy_to_reg (op0);
9264 else
9265 /* Arrange to return the incremented value. */
9266 /* Copy the rtx because expand_binop will protect from the queue,
9267 and the results of that would be invalid for us to return
9268 if our caller does emit_queue before using our result. */
9269 temp = copy_rtx (value = op0);
9271 /* Increment however we can. */
9272 op1 = expand_binop (mode, this_optab, value, op1,
9273 current_function_check_memory_usage ? NULL_RTX : op0,
9274 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9275 /* Make sure the value is stored into OP0. */
9276 if (op1 != op0)
9277 emit_move_insn (op0, op1);
9279 return temp;
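/* An illustrative sketch (hypothetical helper, not code from this file) of
   how the routine above is reached: a front end builds the increment as a
   tree and lets expand_expr dispatch it here.  `expand_postincrement_sketch'
   and its argument DECL, assumed to be a VAR_DECL of integer type, are made
   up for illustration; the routines called are the real ones.  */
#if 0
static rtx
expand_postincrement_sketch (decl)
     tree decl;
{
  tree inc = build (POSTINCREMENT_EXPR, TREE_TYPE (decl),
		    decl, integer_one_node);
  rtx old_value;

  /* expand_expr dispatches *INCREMENT_EXPRs to expand_increment.  For a
     postincrement the value returned is the value before the increment;
     the increment itself may still be sitting on the queue until
     emit_queue is called.  */
  old_value = expand_expr (inc, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  return old_value;
}
#endif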
9282 /* At the start of a function, record that we have no previously-pushed
9283 arguments waiting to be popped. */
9285 void
9286 init_pending_stack_adjust ()
9288 pending_stack_adjust = 0;
9291 /* When exiting from function, if safe, clear out any pending stack adjust
9292 so the adjustment won't get done.
9294 Note, if the current function calls alloca, then it must have a
9295 frame pointer regardless of the value of flag_omit_frame_pointer. */
9297 void
9298 clear_pending_stack_adjust ()
9300 #ifdef EXIT_IGNORE_STACK
9301 if (optimize > 0
9302 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9303 && EXIT_IGNORE_STACK
9304 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9305 && ! flag_inline_functions)
9307 stack_pointer_delta -= pending_stack_adjust,
9308 pending_stack_adjust = 0;
9310 #endif
9313 /* Pop any previously-pushed arguments that have not been popped yet. */
9315 void
9316 do_pending_stack_adjust ()
9318 if (inhibit_defer_pop == 0)
9320 if (pending_stack_adjust != 0)
9321 adjust_stack (GEN_INT (pending_stack_adjust));
9322 pending_stack_adjust = 0;
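/* Usage note: code that emits a label reachable from points with possibly
   different pending adjustments flushes the adjustment first, as in

	do_pending_stack_adjust ();
	emit_label (label);

   so that every path reaching the label agrees on the stack pointer.  */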
9326 /* Expand conditional expressions. */
9328 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9329 LABEL is an rtx of code CODE_LABEL, in this function and all the
9330 functions here. */
9332 void
9333 jumpifnot (exp, label)
9334 tree exp;
9335 rtx label;
9337 do_jump (exp, label, NULL_RTX);
9340 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9342 void
9343 jumpif (exp, label)
9344 tree exp;
9345 rtx label;
9347 do_jump (exp, NULL_RTX, label);
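/* An illustrative sketch (hypothetical helper, not code from this file):
   a statement expander for `if (COND) ... else ...' drives the two entry
   points above roughly like this; `expand_if_sketch' and its argument COND
   are made up for illustration.  */
#if 0
static void
expand_if_sketch (cond)
     tree cond;
{
  rtx else_label = gen_label_rtx ();
  rtx end_label = gen_label_rtx ();

  jumpifnot (cond, else_label);
  /* ... expand the THEN arm ... */
  emit_jump (end_label);
  do_pending_stack_adjust ();
  emit_label (else_label);
  /* ... expand the ELSE arm ... */
  do_pending_stack_adjust ();
  emit_label (end_label);
}
#endif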
9350 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9351 the result is zero, or IF_TRUE_LABEL if the result is one.
9352 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9353 meaning fall through in that case.
9355 do_jump always does any pending stack adjust except when it does not
9356 actually perform a jump. An example where there is no jump
9357 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9359 This function is responsible for optimizing cases such as
9360 &&, || and comparison operators in EXP. */
9362 void
9363 do_jump (exp, if_false_label, if_true_label)
9364 tree exp;
9365 rtx if_false_label, if_true_label;
9367 register enum tree_code code = TREE_CODE (exp);
9368 /* Some cases need to create a label to jump to
9369 in order to properly fall through.
9370 These cases set DROP_THROUGH_LABEL nonzero. */
9371 rtx drop_through_label = 0;
9372 rtx temp;
9373 int i;
9374 tree type;
9375 enum machine_mode mode;
9377 #ifdef MAX_INTEGER_COMPUTATION_MODE
9378 check_max_integer_computation_mode (exp);
9379 #endif
9381 emit_queue ();
9383 switch (code)
9385 case ERROR_MARK:
9386 break;
9388 case INTEGER_CST:
9389 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9390 if (temp)
9391 emit_jump (temp);
9392 break;
9394 #if 0
9395 /* This is not true with #pragma weak */
9396 case ADDR_EXPR:
9397 /* The address of something can never be zero. */
9398 if (if_true_label)
9399 emit_jump (if_true_label);
9400 break;
9401 #endif
9403 case NOP_EXPR:
9404 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9405 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9406 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9407 goto normal;
9408 case CONVERT_EXPR:
9409 /* If we are narrowing the operand, we have to do the compare in the
9410 narrower mode. */
9411 if ((TYPE_PRECISION (TREE_TYPE (exp))
9412 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9413 goto normal;
9414 case NON_LVALUE_EXPR:
9415 case REFERENCE_EXPR:
9416 case ABS_EXPR:
9417 case NEGATE_EXPR:
9418 case LROTATE_EXPR:
9419 case RROTATE_EXPR:
9420 /* These cannot change zero->non-zero or vice versa. */
9421 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9422 break;
9424 case WITH_RECORD_EXPR:
9425 /* Put the object on the placeholder list, recurse through our first
9426 operand, and pop the list. */
9427 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9428 placeholder_list);
9429 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9430 placeholder_list = TREE_CHAIN (placeholder_list);
9431 break;
9433 #if 0
9434 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9435 a test, and can be longer if the test is eliminated.  */
9436 case PLUS_EXPR:
9437 /* Reduce to minus. */
9438 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9439 TREE_OPERAND (exp, 0),
9440 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9441 TREE_OPERAND (exp, 1))));
9442 /* Process as MINUS. */
9443 #endif
9445 case MINUS_EXPR:
9446 /* Non-zero iff operands of minus differ. */
9447 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9448 TREE_OPERAND (exp, 0),
9449 TREE_OPERAND (exp, 1)),
9450 NE, NE, if_false_label, if_true_label);
9451 break;
9453 case BIT_AND_EXPR:
9454 /* If we are AND'ing with a small constant, do this comparison in the
9455 smallest type that fits. If the machine doesn't have comparisons
9456 that small, it will be converted back to the wider comparison.
9457 This helps if we are testing the sign bit of a narrower object.
9458 combine can't do this for us because it can't know whether a
9459 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
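/* For example, testing `flags & 0x4000' on a 32-bit int can be redone as
   an unsigned 16-bit (HImode) comparison whenever the target has a compare
   in that mode.  */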
9461 if (! SLOW_BYTE_ACCESS
9462 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9463 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9464 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9465 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9466 && (type = type_for_mode (mode, 1)) != 0
9467 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9468 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9469 != CODE_FOR_nothing))
9471 do_jump (convert (type, exp), if_false_label, if_true_label);
9472 break;
9474 goto normal;
9476 case TRUTH_NOT_EXPR:
9477 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9478 break;
9480 case TRUTH_ANDIF_EXPR:
9481 if (if_false_label == 0)
9482 if_false_label = drop_through_label = gen_label_rtx ();
9483 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9484 start_cleanup_deferral ();
9485 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9486 end_cleanup_deferral ();
9487 break;
9489 case TRUTH_ORIF_EXPR:
9490 if (if_true_label == 0)
9491 if_true_label = drop_through_label = gen_label_rtx ();
9492 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9493 start_cleanup_deferral ();
9494 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9495 end_cleanup_deferral ();
9496 break;
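/* For `a && b' the TRUTH_ANDIF_EXPR case above gives the usual
   short-circuit shape: jump to the false label as soon as A is zero, and
   only then test B.  TRUTH_ORIF_EXPR is the dual, jumping to the true
   label as soon as A is nonzero.  */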
9498 case COMPOUND_EXPR:
9499 push_temp_slots ();
9500 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9501 preserve_temp_slots (NULL_RTX);
9502 free_temp_slots ();
9503 pop_temp_slots ();
9504 emit_queue ();
9505 do_pending_stack_adjust ();
9506 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9507 break;
9509 case COMPONENT_REF:
9510 case BIT_FIELD_REF:
9511 case ARRAY_REF:
9513 HOST_WIDE_INT bitsize, bitpos;
9514 int unsignedp;
9515 enum machine_mode mode;
9516 tree type;
9517 tree offset;
9518 int volatilep = 0;
9519 unsigned int alignment;
9521 /* Get description of this reference. We don't actually care
9522 about the underlying object here. */
9523 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9524 &unsignedp, &volatilep, &alignment);
9526 type = type_for_size (bitsize, unsignedp);
9527 if (! SLOW_BYTE_ACCESS
9528 && type != 0 && bitsize >= 0
9529 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9530 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9531 != CODE_FOR_nothing))
9533 do_jump (convert (type, exp), if_false_label, if_true_label);
9534 break;
9536 goto normal;
9539 case COND_EXPR:
9540 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9541 if (integer_onep (TREE_OPERAND (exp, 1))
9542 && integer_zerop (TREE_OPERAND (exp, 2)))
9543 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9545 else if (integer_zerop (TREE_OPERAND (exp, 1))
9546 && integer_onep (TREE_OPERAND (exp, 2)))
9547 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9549 else
9551 register rtx label1 = gen_label_rtx ();
9552 drop_through_label = gen_label_rtx ();
9554 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9556 start_cleanup_deferral ();
9557 /* Now the THEN-expression. */
9558 do_jump (TREE_OPERAND (exp, 1),
9559 if_false_label ? if_false_label : drop_through_label,
9560 if_true_label ? if_true_label : drop_through_label);
9561 /* In case the do_jump just above never jumps. */
9562 do_pending_stack_adjust ();
9563 emit_label (label1);
9565 /* Now the ELSE-expression. */
9566 do_jump (TREE_OPERAND (exp, 2),
9567 if_false_label ? if_false_label : drop_through_label,
9568 if_true_label ? if_true_label : drop_through_label);
9569 end_cleanup_deferral ();
9571 break;
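/* So for a general `a ? b : c' used as a condition: jump to LABEL1 when A
   is zero, jump on B for the THEN arm, and after LABEL1 jump on C for the
   ELSE arm; DROP_THROUGH_LABEL catches whichever arm falls through.  */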
9573 case EQ_EXPR:
9575 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9577 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9578 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9580 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9581 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9582 do_jump
9583 (fold
9584 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9585 fold (build (EQ_EXPR, TREE_TYPE (exp),
9586 fold (build1 (REALPART_EXPR,
9587 TREE_TYPE (inner_type),
9588 exp0)),
9589 fold (build1 (REALPART_EXPR,
9590 TREE_TYPE (inner_type),
9591 exp1)))),
9592 fold (build (EQ_EXPR, TREE_TYPE (exp),
9593 fold (build1 (IMAGPART_EXPR,
9594 TREE_TYPE (inner_type),
9595 exp0)),
9596 fold (build1 (IMAGPART_EXPR,
9597 TREE_TYPE (inner_type),
9598 exp1)))))),
9599 if_false_label, if_true_label);
9602 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9603 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9605 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9606 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9607 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9608 else
9609 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9610 break;
9613 case NE_EXPR:
9615 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9617 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9618 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9620 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9621 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9622 do_jump
9623 (fold
9624 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9625 fold (build (NE_EXPR, TREE_TYPE (exp),
9626 fold (build1 (REALPART_EXPR,
9627 TREE_TYPE (inner_type),
9628 exp0)),
9629 fold (build1 (REALPART_EXPR,
9630 TREE_TYPE (inner_type),
9631 exp1)))),
9632 fold (build (NE_EXPR, TREE_TYPE (exp),
9633 fold (build1 (IMAGPART_EXPR,
9634 TREE_TYPE (inner_type),
9635 exp0)),
9636 fold (build1 (IMAGPART_EXPR,
9637 TREE_TYPE (inner_type),
9638 exp1)))))),
9639 if_false_label, if_true_label);
9642 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9643 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9645 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9646 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9647 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9648 else
9649 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9650 break;
9653 case LT_EXPR:
9654 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9655 if (GET_MODE_CLASS (mode) == MODE_INT
9656 && ! can_compare_p (LT, mode, ccp_jump))
9657 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9658 else
9659 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9660 break;
9662 case LE_EXPR:
9663 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9664 if (GET_MODE_CLASS (mode) == MODE_INT
9665 && ! can_compare_p (LE, mode, ccp_jump))
9666 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9667 else
9668 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9669 break;
9671 case GT_EXPR:
9672 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9673 if (GET_MODE_CLASS (mode) == MODE_INT
9674 && ! can_compare_p (GT, mode, ccp_jump))
9675 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9676 else
9677 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9678 break;
9680 case GE_EXPR:
9681 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9682 if (GET_MODE_CLASS (mode) == MODE_INT
9683 && ! can_compare_p (GE, mode, ccp_jump))
9684 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9685 else
9686 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9687 break;
9689 case UNORDERED_EXPR:
9690 case ORDERED_EXPR:
9692 enum rtx_code cmp, rcmp;
9693 int do_rev;
9695 if (code == UNORDERED_EXPR)
9696 cmp = UNORDERED, rcmp = ORDERED;
9697 else
9698 cmp = ORDERED, rcmp = UNORDERED;
9699 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9701 do_rev = 0;
9702 if (! can_compare_p (cmp, mode, ccp_jump)
9703 && (can_compare_p (rcmp, mode, ccp_jump)
9704 /* If the target doesn't provide either UNORDERED or ORDERED
9705 comparisons, canonicalize on UNORDERED for the library. */
9706 || rcmp == UNORDERED))
9707 do_rev = 1;
9709 if (! do_rev)
9710 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9711 else
9712 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9714 break;
9717 enum rtx_code rcode1;
9718 enum tree_code tcode2;
9720 case UNLT_EXPR:
9721 rcode1 = UNLT;
9722 tcode2 = LT_EXPR;
9723 goto unordered_bcc;
9724 case UNLE_EXPR:
9725 rcode1 = UNLE;
9726 tcode2 = LE_EXPR;
9727 goto unordered_bcc;
9728 case UNGT_EXPR:
9729 rcode1 = UNGT;
9730 tcode2 = GT_EXPR;
9731 goto unordered_bcc;
9732 case UNGE_EXPR:
9733 rcode1 = UNGE;
9734 tcode2 = GE_EXPR;
9735 goto unordered_bcc;
9736 case UNEQ_EXPR:
9737 rcode1 = UNEQ;
9738 tcode2 = EQ_EXPR;
9739 goto unordered_bcc;
9741 unordered_bcc:
9742 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9743 if (can_compare_p (rcode1, mode, ccp_jump))
9744 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9745 if_true_label);
9746 else
9748 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9749 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9750 tree cmp0, cmp1;
9752 /* If the target doesn't support combined unordered
9753 compares, decompose into UNORDERED + comparison. */
9754 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9755 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9756 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9757 do_jump (exp, if_false_label, if_true_label);
9760 break;
9762 default:
9763 normal:
9764 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9765 #if 0
9766 /* This is not needed any more and causes poor code since it causes
9767 comparisons and tests from non-SI objects to have different code
9768 sequences. */
9769 /* Copy to register to avoid generating bad insns by cse
9770 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9771 if (!cse_not_expected && GET_CODE (temp) == MEM)
9772 temp = copy_to_reg (temp);
9773 #endif
9774 do_pending_stack_adjust ();
9775 /* Do any postincrements in the expression that was tested. */
9776 emit_queue ();
9778 if (GET_CODE (temp) == CONST_INT
9779 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9780 || GET_CODE (temp) == LABEL_REF)
9782 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9783 if (target)
9784 emit_jump (target);
9786 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9787 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9788 /* Note swapping the labels gives us not-equal. */
9789 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9790 else if (GET_MODE (temp) != VOIDmode)
9791 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9792 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9793 GET_MODE (temp), NULL_RTX, 0,
9794 if_false_label, if_true_label);
9795 else
9796 abort ();
9799 if (drop_through_label)
9801 /* If do_jump produces code that might be jumped around,
9802 do any stack adjusts from that code, before the place
9803 where control merges in. */
9804 do_pending_stack_adjust ();
9805 emit_label (drop_through_label);
9809 /* Given a comparison expression EXP for values too wide to be compared
9810 with one insn, test the comparison and jump to the appropriate label.
9811 The code of EXP is ignored; we always test GT if SWAP is 0,
9812 and LT if SWAP is 1. */
9814 static void
9815 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9816 tree exp;
9817 int swap;
9818 rtx if_false_label, if_true_label;
9820 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9821 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9822 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9823 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9825 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9828 /* Compare OP0 with OP1, word at a time, in mode MODE.
9829 UNSIGNEDP says to do unsigned comparison.
9830 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9832 void
9833 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9834 enum machine_mode mode;
9835 int unsignedp;
9836 rtx op0, op1;
9837 rtx if_false_label, if_true_label;
9839 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9840 rtx drop_through_label = 0;
9841 int i;
9843 if (! if_true_label || ! if_false_label)
9844 drop_through_label = gen_label_rtx ();
9845 if (! if_true_label)
9846 if_true_label = drop_through_label;
9847 if (! if_false_label)
9848 if_false_label = drop_through_label;
9850 /* Compare a word at a time, high order first. */
9851 for (i = 0; i < nwords; i++)
9853 rtx op0_word, op1_word;
9855 if (WORDS_BIG_ENDIAN)
9857 op0_word = operand_subword_force (op0, i, mode);
9858 op1_word = operand_subword_force (op1, i, mode);
9860 else
9862 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9863 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9866 /* All but high-order word must be compared as unsigned. */
9867 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9868 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9869 NULL_RTX, if_true_label);
9871 /* Consider lower words only if these are equal. */
9872 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9873 NULL_RTX, 0, NULL_RTX, if_false_label);
9876 if (if_false_label)
9877 emit_jump (if_false_label);
9878 if (drop_through_label)
9879 emit_label (drop_through_label);
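/* Worked example: for a signed DImode comparison on a 32-bit target,
   NWORDS is 2.  The first iteration compares the high words: a signed GT
   jumps straight to the true label, and an NE (the high words differ, but
   not in our favor) jumps to the false label.  Only when the high words
   are equal do we fall through to the second iteration, which compares the
   low words unsigned.  The jump after the loop handles the all-words-equal
   case, which is "not greater".  */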
9882 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9883 with one insn, test the comparison and jump to the appropriate label. */
9885 static void
9886 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9887 tree exp;
9888 rtx if_false_label, if_true_label;
9890 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9891 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9892 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9893 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9894 int i;
9895 rtx drop_through_label = 0;
9897 if (! if_false_label)
9898 drop_through_label = if_false_label = gen_label_rtx ();
9900 for (i = 0; i < nwords; i++)
9901 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9902 operand_subword_force (op1, i, mode),
9903 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9904 word_mode, NULL_RTX, 0, if_false_label,
9905 NULL_RTX);
9907 if (if_true_label)
9908 emit_jump (if_true_label);
9909 if (drop_through_label)
9910 emit_label (drop_through_label);
9913 /* Jump according to whether OP0 is 0.
9914 We assume that OP0 has an integer mode that is too wide
9915 for the available compare insns. */
9917 void
9918 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9919 rtx op0;
9920 rtx if_false_label, if_true_label;
9922 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9923 rtx part;
9924 int i;
9925 rtx drop_through_label = 0;
9927 /* The fastest way of doing this comparison on almost any machine is to
9928 "or" all the words and compare the result. If all have to be loaded
9929 from memory and this is a very wide item, it's possible this may
9930 be slower, but that's highly unlikely. */
9932 part = gen_reg_rtx (word_mode);
9933 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9934 for (i = 1; i < nwords && part != 0; i++)
9935 part = expand_binop (word_mode, ior_optab, part,
9936 operand_subword_force (op0, i, GET_MODE (op0)),
9937 part, 1, OPTAB_WIDEN);
9939 if (part != 0)
9941 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9942 NULL_RTX, 0, if_false_label, if_true_label);
9944 return;
9947 /* If we couldn't do the "or" simply, do this with a series of compares. */
9948 if (! if_false_label)
9949 drop_through_label = if_false_label = gen_label_rtx ();
9951 for (i = 0; i < nwords; i++)
9952 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9953 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9954 if_false_label, NULL_RTX);
9956 if (if_true_label)
9957 emit_jump (if_true_label);
9959 if (drop_through_label)
9960 emit_label (drop_through_label);
9963 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9964 (including code to compute the values to be compared)
9965 and set (CC0) according to the result.
9966 The decision as to signed or unsigned comparison must be made by the caller.
9968 We force a stack adjustment unless there are currently
9969 things pushed on the stack that aren't yet used.
9971 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9972 compared.
9974 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9975 size of MODE should be used. */
9977 rtx
9978 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9979 register rtx op0, op1;
9980 enum rtx_code code;
9981 int unsignedp;
9982 enum machine_mode mode;
9983 rtx size;
9984 unsigned int align;
9986 rtx tem;
9988 /* If one operand is constant, make it the second one. Only do this
9989 if the other operand is not constant as well. */
9991 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9992 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9994 tem = op0;
9995 op0 = op1;
9996 op1 = tem;
9997 code = swap_condition (code);
10000 if (flag_force_mem)
10002 op0 = force_not_mem (op0);
10003 op1 = force_not_mem (op1);
10006 do_pending_stack_adjust ();
10008 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10009 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10010 return tem;
10012 #if 0
10013 /* There's no need to do this now that combine.c can eliminate lots of
10014 sign extensions. This can be less efficient in certain cases on other
10015 machines. */
10017 /* If this is a signed equality comparison, we can do it as an
10018 unsigned comparison since zero-extension is cheaper than sign
10019 extension and comparisons with zero are done as unsigned. This is
10020 the case even on machines that can do fast sign extension, since
10021 zero-extension is easier to combine with other operations than
10022 sign-extension is. If we are comparing against a constant, we must
10023 convert it to what it would look like unsigned. */
10024 if ((code == EQ || code == NE) && ! unsignedp
10025 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10027 if (GET_CODE (op1) == CONST_INT
10028 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10029 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10030 unsignedp = 1;
10032 #endif
10034 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10036 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10039 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10040 The decision as to signed or unsigned comparison must be made by the caller.
10042 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10043 compared.
10045 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10046 size of MODE should be used. */
10048 void
10049 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10050 if_false_label, if_true_label)
10051 register rtx op0, op1;
10052 enum rtx_code code;
10053 int unsignedp;
10054 enum machine_mode mode;
10055 rtx size;
10056 unsigned int align;
10057 rtx if_false_label, if_true_label;
10059 rtx tem;
10060 int dummy_true_label = 0;
10062 /* Reverse the comparison if that is safe and we want to jump if it is
10063 false. */
10064 if (! if_true_label && ! FLOAT_MODE_P (mode))
10066 if_true_label = if_false_label;
10067 if_false_label = 0;
10068 code = reverse_condition (code);
10071 /* If one operand is constant, make it the second one. Only do this
10072 if the other operand is not constant as well. */
10074 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10075 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10077 tem = op0;
10078 op0 = op1;
10079 op1 = tem;
10080 code = swap_condition (code);
10083 if (flag_force_mem)
10085 op0 = force_not_mem (op0);
10086 op1 = force_not_mem (op1);
10089 do_pending_stack_adjust ();
10091 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10092 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10094 if (tem == const_true_rtx)
10096 if (if_true_label)
10097 emit_jump (if_true_label);
10099 else
10101 if (if_false_label)
10102 emit_jump (if_false_label);
10104 return;
10107 #if 0
10108 /* There's no need to do this now that combine.c can eliminate lots of
10109 sign extensions. This can be less efficient in certain cases on other
10110 machines. */
10112 /* If this is a signed equality comparison, we can do it as an
10113 unsigned comparison since zero-extension is cheaper than sign
10114 extension and comparisons with zero are done as unsigned. This is
10115 the case even on machines that can do fast sign extension, since
10116 zero-extension is easier to combine with other operations than
10117 sign-extension is. If we are comparing against a constant, we must
10118 convert it to what it would look like unsigned. */
10119 if ((code == EQ || code == NE) && ! unsignedp
10120 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10122 if (GET_CODE (op1) == CONST_INT
10123 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10124 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10125 unsignedp = 1;
10127 #endif
10129 if (! if_true_label)
10131 dummy_true_label = 1;
10132 if_true_label = gen_label_rtx ();
10135 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10136 if_true_label);
10138 if (if_false_label)
10139 emit_jump (if_false_label);
10140 if (dummy_true_label)
10141 emit_label (if_true_label);
10144 /* Generate code for a comparison expression EXP (including code to compute
10145 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10146 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10147 generated code will drop through.
10148 SIGNED_CODE should be the rtx operation for this comparison for
10149 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10151 We force a stack adjustment unless there are currently
10152 things pushed on the stack that aren't yet used. */
10154 static void
10155 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10156 if_true_label)
10157 register tree exp;
10158 enum rtx_code signed_code, unsigned_code;
10159 rtx if_false_label, if_true_label;
10161 unsigned int align0, align1;
10162 register rtx op0, op1;
10163 register tree type;
10164 register enum machine_mode mode;
10165 int unsignedp;
10166 enum rtx_code code;
10168 /* Don't crash if the comparison was erroneous. */
10169 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10170 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10171 return;
10173 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10174 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10175 return;
10177 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10178 mode = TYPE_MODE (type);
10179 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10180 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10181 || (GET_MODE_BITSIZE (mode)
10182 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10183 1)))))))
10185 /* op0 might have been replaced by promoted constant, in which
10186 case the type of second argument should be used. */
10187 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10188 mode = TYPE_MODE (type);
10190 unsignedp = TREE_UNSIGNED (type);
10191 code = unsignedp ? unsigned_code : signed_code;
10193 #ifdef HAVE_canonicalize_funcptr_for_compare
10194 /* If function pointers need to be "canonicalized" before they can
10195 be reliably compared, then canonicalize them. */
10196 if (HAVE_canonicalize_funcptr_for_compare
10197 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10198 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10199 == FUNCTION_TYPE))
10201 rtx new_op0 = gen_reg_rtx (mode);
10203 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10204 op0 = new_op0;
10207 if (HAVE_canonicalize_funcptr_for_compare
10208 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10209 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10210 == FUNCTION_TYPE))
10212 rtx new_op1 = gen_reg_rtx (mode);
10214 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10215 op1 = new_op1;
10217 #endif
10219 /* Do any postincrements in the expression that was tested. */
10220 emit_queue ();
10222 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10223 ((mode == BLKmode)
10224 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10225 MIN (align0, align1),
10226 if_false_label, if_true_label);
10229 /* Generate code to calculate EXP using a store-flag instruction
10230 and return an rtx for the result. EXP is either a comparison
10231 or a TRUTH_NOT_EXPR whose operand is a comparison.
10233 If TARGET is nonzero, store the result there if convenient.
10235 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10236 cheap.
10238 Return zero if there is no suitable set-flag instruction
10239 available on this machine.
10241 Once expand_expr has been called on the arguments of the comparison,
10242 we are committed to doing the store flag, since it is not safe to
10243 re-evaluate the expression. We emit the store-flag insn by calling
10244 emit_store_flag, but only expand the arguments if we have a reason
10245 to believe that emit_store_flag will be successful. If we think that
10246 it will, but it isn't, we have to simulate the store-flag with a
10247 set/jump/set sequence. */
10249 static rtx
10250 do_store_flag (exp, target, mode, only_cheap)
10251 tree exp;
10252 rtx target;
10253 enum machine_mode mode;
10254 int only_cheap;
10256 enum rtx_code code;
10257 tree arg0, arg1, type;
10258 tree tem;
10259 enum machine_mode operand_mode;
10260 int invert = 0;
10261 int unsignedp;
10262 rtx op0, op1;
10263 enum insn_code icode;
10264 rtx subtarget = target;
10265 rtx result, label;
10267 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10268 result at the end. We can't simply invert the test since it would
10269 have already been inverted if it were valid. This case occurs for
10270 some floating-point comparisons. */
10272 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10273 invert = 1, exp = TREE_OPERAND (exp, 0);
10275 arg0 = TREE_OPERAND (exp, 0);
10276 arg1 = TREE_OPERAND (exp, 1);
10278 /* Don't crash if the comparison was erroneous. */
10279 if (arg0 == error_mark_node || arg1 == error_mark_node)
10280 return const0_rtx;
10282 type = TREE_TYPE (arg0);
10283 operand_mode = TYPE_MODE (type);
10284 unsignedp = TREE_UNSIGNED (type);
10286 /* We won't bother with BLKmode store-flag operations because it would mean
10287 passing a lot of information to emit_store_flag. */
10288 if (operand_mode == BLKmode)
10289 return 0;
10291 /* We won't bother with store-flag operations involving function pointers
10292 when function pointers must be canonicalized before comparisons. */
10293 #ifdef HAVE_canonicalize_funcptr_for_compare
10294 if (HAVE_canonicalize_funcptr_for_compare
10295 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10296 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10297 == FUNCTION_TYPE))
10298 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10299 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10300 == FUNCTION_TYPE))))
10301 return 0;
10302 #endif
10304 STRIP_NOPS (arg0);
10305 STRIP_NOPS (arg1);
10307 /* Get the rtx comparison code to use. We know that EXP is a comparison
10308 operation of some type. Some comparisons against 1 and -1 can be
10309 converted to comparisons with zero. Do so here so that the tests
10310 below will be aware that we have a comparison with zero. These
10311 tests will not catch constants in the first operand, but constants
10312 are rarely passed as the first operand. */
10314 switch (TREE_CODE (exp))
10316 case EQ_EXPR:
10317 code = EQ;
10318 break;
10319 case NE_EXPR:
10320 code = NE;
10321 break;
10322 case LT_EXPR:
10323 if (integer_onep (arg1))
10324 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10325 else
10326 code = unsignedp ? LTU : LT;
10327 break;
10328 case LE_EXPR:
10329 if (! unsignedp && integer_all_onesp (arg1))
10330 arg1 = integer_zero_node, code = LT;
10331 else
10332 code = unsignedp ? LEU : LE;
10333 break;
10334 case GT_EXPR:
10335 if (! unsignedp && integer_all_onesp (arg1))
10336 arg1 = integer_zero_node, code = GE;
10337 else
10338 code = unsignedp ? GTU : GT;
10339 break;
10340 case GE_EXPR:
10341 if (integer_onep (arg1))
10342 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10343 else
10344 code = unsignedp ? GEU : GE;
10345 break;
10347 case UNORDERED_EXPR:
10348 code = UNORDERED;
10349 break;
10350 case ORDERED_EXPR:
10351 code = ORDERED;
10352 break;
10353 case UNLT_EXPR:
10354 code = UNLT;
10355 break;
10356 case UNLE_EXPR:
10357 code = UNLE;
10358 break;
10359 case UNGT_EXPR:
10360 code = UNGT;
10361 break;
10362 case UNGE_EXPR:
10363 code = UNGE;
10364 break;
10365 case UNEQ_EXPR:
10366 code = UNEQ;
10367 break;
10369 default:
10370 abort ();
10373 /* Put a constant second. */
10374 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10376 tem = arg0; arg0 = arg1; arg1 = tem;
10377 code = swap_condition (code);
10380 /* If this is an equality or inequality test of a single bit, we can
10381 do this by shifting the bit being tested to the low-order bit and
10382 masking the result with the constant 1. If the condition was EQ,
10383 we xor it with 1. This does not require an scc insn and is faster
10384 than an scc insn even if we have it. */
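/* For example, `(x & 8) != 0' becomes `(x >> 3) & 1'; for the EQ form the
   result is additionally XORed with 1.  */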
10386 if ((code == NE || code == EQ)
10387 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10388 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10390 tree inner = TREE_OPERAND (arg0, 0);
10391 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10392 int ops_unsignedp;
10394 /* If INNER is a right shift by a constant and its count plus BITNUM does
10395 not overflow, adjust BITNUM and INNER. */
10397 if (TREE_CODE (inner) == RSHIFT_EXPR
10398 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10399 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10400 && bitnum < TYPE_PRECISION (type)
10401 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10402 bitnum - TYPE_PRECISION (type)))
10404 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10405 inner = TREE_OPERAND (inner, 0);
10408 /* If we are going to be able to omit the AND below, we must do our
10409 operations as unsigned. If we must use the AND, we have a choice.
10410 Normally unsigned is faster, but for some machines signed is. */
10411 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10412 #ifdef LOAD_EXTEND_OP
10413 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10414 #else
10415 : 1
10416 #endif
10417 );
10419 if (! get_subtarget (subtarget)
10420 || GET_MODE (subtarget) != operand_mode
10421 || ! safe_from_p (subtarget, inner, 1))
10422 subtarget = 0;
10424 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10426 if (bitnum != 0)
10427 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10428 size_int (bitnum), subtarget, ops_unsignedp);
10430 if (GET_MODE (op0) != mode)
10431 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10433 if ((code == EQ && ! invert) || (code == NE && invert))
10434 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10435 ops_unsignedp, OPTAB_LIB_WIDEN);
10437 /* Put the AND last so it can combine with more things. */
10438 if (bitnum != TYPE_PRECISION (type) - 1)
10439 op0 = expand_and (op0, const1_rtx, subtarget);
10441 return op0;
10444 /* Now see if we are likely to be able to do this. Return if not. */
10445 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10446 return 0;
10448 icode = setcc_gen_code[(int) code];
10449 if (icode == CODE_FOR_nothing
10450 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10452 /* We can only do this if it is one of the special cases that
10453 can be handled without an scc insn. */
10454 if ((code == LT && integer_zerop (arg1))
10455 || (! only_cheap && code == GE && integer_zerop (arg1)))
10456 ;
10457 else if (BRANCH_COST >= 0
10458 && ! only_cheap && (code == NE || code == EQ)
10459 && TREE_CODE (type) != REAL_TYPE
10460 && ((abs_optab->handlers[(int) operand_mode].insn_code
10461 != CODE_FOR_nothing)
10462 || (ffs_optab->handlers[(int) operand_mode].insn_code
10463 != CODE_FOR_nothing)))
10464 ;
10465 else
10466 return 0;
10469 if (! get_subtarget (target)
10470 || GET_MODE (subtarget) != operand_mode
10471 || ! safe_from_p (subtarget, arg1, 1))
10472 subtarget = 0;
10474 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10475 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10477 if (target == 0)
10478 target = gen_reg_rtx (mode);
10480 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10481 because, if emit_store_flag does anything, it will succeed and
10482 OP0 and OP1 will not be used subsequently. */
10484 result = emit_store_flag (target, code,
10485 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10486 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10487 operand_mode, unsignedp, 1);
10489 if (result)
10491 if (invert)
10492 result = expand_binop (mode, xor_optab, result, const1_rtx,
10493 result, 0, OPTAB_LIB_WIDEN);
10494 return result;
10497 /* If this failed, we have to do this with set/compare/jump/set code. */
10498 if (GET_CODE (target) != REG
10499 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10500 target = gen_reg_rtx (GET_MODE (target));
10502 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10503 result = compare_from_rtx (op0, op1, code, unsignedp,
10504 operand_mode, NULL_RTX, 0);
10505 if (GET_CODE (result) == CONST_INT)
10506 return (((result == const0_rtx && ! invert)
10507 || (result != const0_rtx && invert))
10508 ? const0_rtx : const1_rtx);
10510 label = gen_label_rtx ();
10511 if (bcc_gen_fctn[(int) code] == 0)
10512 abort ();
10514 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10515 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10516 emit_label (label);
10518 return target;
10521 /* Generate a tablejump instruction (used for switch statements). */
10523 #ifdef HAVE_tablejump
10525 /* INDEX is the value being switched on, with the lowest value
10526 in the table already subtracted.
10527 MODE is its expected mode (needed if INDEX is constant).
10528 RANGE is the length of the jump table.
10529 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10531 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10532 index value is out of range. */
10534 void
10535 do_tablejump (index, mode, range, table_label, default_label)
10536 rtx index, range, table_label, default_label;
10537 enum machine_mode mode;
10539 register rtx temp, vector;
10541 /* Do an unsigned comparison (in the proper mode) between the index
10542 expression and the value which represents the length of the range.
10543 Since we just finished subtracting the lower bound of the range
10544 from the index expression, this comparison allows us to simultaneously
10545 check that the original index expression value is both greater than
10546 or equal to the minimum value of the range and less than or equal to
10547 the maximum value of the range. */
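/* For example, for `switch (i)' with cases 3 through 10 the caller passes
   INDEX = i - 3 and RANGE = 7; an out-of-range i of 2 wraps around to a
   huge unsigned INDEX, so the single GTU test below catches values under
   the minimum as well as over the maximum.  */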
10549 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10550 0, default_label);
10552 /* If index is in range, it must fit in Pmode.
10553 Convert to Pmode so we can index with it. */
10554 if (mode != Pmode)
10555 index = convert_to_mode (Pmode, index, 1);
10557 /* Don't let a MEM slip through, because then INDEX that comes
10558 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10559 and break_out_memory_refs will go to work on it and mess it up. */
10560 #ifdef PIC_CASE_VECTOR_ADDRESS
10561 if (flag_pic && GET_CODE (index) != REG)
10562 index = copy_to_mode_reg (Pmode, index);
10563 #endif
10565 /* If flag_force_addr were to affect this address
10566 it could interfere with the tricky assumptions made
10567 about addresses that contain label-refs,
10568 which may be valid only very near the tablejump itself. */
10569 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10570 GET_MODE_SIZE, because this indicates how large insns are. The other
10571 uses should all be Pmode, because they are addresses. This code
10572 could fail if addresses and insns are not the same size. */
10573 index = gen_rtx_PLUS (Pmode,
10574 gen_rtx_MULT (Pmode, index,
10575 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10576 gen_rtx_LABEL_REF (Pmode, table_label));
10577 #ifdef PIC_CASE_VECTOR_ADDRESS
10578 if (flag_pic)
10579 index = PIC_CASE_VECTOR_ADDRESS (index);
10580 else
10581 #endif
10582 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10583 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10584 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10585 RTX_UNCHANGING_P (vector) = 1;
10586 convert_move (temp, vector, 0);
10588 emit_jump_insn (gen_tablejump (temp, table_label));
10590 /* If we are generating PIC code or if the table is PC-relative, the
10591 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10592 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10593 emit_barrier ();
10596 #endif /* HAVE_tablejump */