gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
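/* In other words: by default, arguments are pushed with push insns
   whenever the target provides them and outgoing argument space is not
   being accumulated; otherwise they are stored into preallocated slots.  */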
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
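/* For example, on a target where the stack grows downward but argument
   offsets grow upward (e.g. ix86) and push insns exist, PUSH_ARGS_REVERSED
   is defined, so the last argument is pushed first, giving the familiar
   right-to-left push order.  */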
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90 codes that safe_from_p needs to know about. Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
105 /* Don't check memory usage, since code is being emitted to check a memory
106 usage. Used when current_function_check_memory_usage is true, to avoid
107 infinite recursion. */
108 static int in_check_memory_usage;
110 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
111 static tree placeholder_list = 0;
113 /* This structure is used by move_by_pieces to describe the move to
114 be performed. */
115 struct move_by_pieces
117 rtx to;
118 rtx to_addr;
119 int autinc_to;
120 int explicit_inc_to;
121 rtx from;
122 rtx from_addr;
123 int autinc_from;
124 int explicit_inc_from;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 int reverse;
130 /* This structure is used by store_by_pieces to describe the clear to
131 be performed. */
133 struct store_by_pieces
135 rtx to;
136 rtx to_addr;
137 int autinc_to;
138 int explicit_inc_to;
139 unsigned HOST_WIDE_INT len;
140 HOST_WIDE_INT offset;
141 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
142 PTR constfundata;
143 int reverse;
146 extern struct obstack permanent_obstack;
148 static rtx get_push_address PARAMS ((int));
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
152 PARAMS ((unsigned HOST_WIDE_INT,
153 unsigned int));
154 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
155 struct move_by_pieces *));
156 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
157 enum machine_mode));
158 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 unsigned int));
160 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
161 unsigned int));
162 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
163 enum machine_mode,
164 struct store_by_pieces *));
165 static rtx get_subtarget PARAMS ((rtx));
166 static int is_zeros_p PARAMS ((tree));
167 static int mostly_zeros_p PARAMS ((tree));
168 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
169 HOST_WIDE_INT, enum machine_mode,
170 tree, tree, unsigned int, int,
171 int));
172 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
173 HOST_WIDE_INT));
174 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
175 HOST_WIDE_INT, enum machine_mode,
176 tree, enum machine_mode, int,
177 unsigned int, HOST_WIDE_INT, int));
178 static enum memory_use_mode
179 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
180 static tree save_noncopied_parts PARAMS ((tree, tree));
181 static tree init_noncopied_parts PARAMS ((tree, tree));
182 static int fixed_type_p PARAMS ((tree));
183 static rtx var_rtx PARAMS ((tree));
184 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
185 static rtx expand_increment PARAMS ((tree, int, int));
186 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
187 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
188 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
189 rtx, rtx));
190 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
192 /* Record for each mode whether we can move a register directly to or
193 from an object of that mode in memory. If we can't, we won't try
194 to use that mode directly when accessing a field of that mode. */
196 static char direct_load[NUM_MACHINE_MODES];
197 static char direct_store[NUM_MACHINE_MODES];
199 /* If a memory-to-memory move would take MOVE_RATIO or more simple
200 move-instruction sequences, we will do a movstr or libcall instead. */
202 #ifndef MOVE_RATIO
203 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
204 #define MOVE_RATIO 2
205 #else
206 /* If we are optimizing for space (-Os), cut down the default move ratio. */
207 #define MOVE_RATIO (optimize_size ? 3 : 15)
208 #endif
209 #endif
211 /* This macro is used to determine whether move_by_pieces should be called
212 to perform a structure copy. */
213 #ifndef MOVE_BY_PIECES_P
214 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
215 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
216 #endif
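/* Worked example of the heuristic above: copying a 16-byte, word-aligned
   block on a hypothetical 32-bit target with no movstr patterns takes four
   SImode moves.  Since 4 < 15, move_by_pieces is used when optimizing for
   speed; with -Os, MOVE_RATIO is 3 and the block-move/libcall path is
   taken instead.  */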
218 /* This array records the insn_code of insns to perform block moves. */
219 enum insn_code movstr_optab[NUM_MACHINE_MODES];
221 /* This array records the insn_code of insns to perform block clears. */
222 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
224 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
226 #ifndef SLOW_UNALIGNED_ACCESS
227 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
228 #endif
230 /* This is run once per compilation to set up which modes can be used
231 directly in memory and to initialize the block move optab. */
233 void
234 init_expr_once ()
236 rtx insn, pat;
237 enum machine_mode mode;
238 int num_clobbers;
239 rtx mem, mem1;
241 start_sequence ();
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
249 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
250 pat = PATTERN (insn);
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
255 int regno;
256 rtx reg;
258 direct_load[(int) mode] = direct_store[(int) mode] = 0;
259 PUT_MODE (mem, mode);
260 PUT_MODE (mem1, mode);
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
268 regno++)
270 if (! HARD_REGNO_MODE_OK (regno, mode))
271 continue;
273 reg = gen_rtx_REG (mode, regno);
275 SET_SRC (pat) = mem;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
290 SET_SRC (pat) = reg;
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
297 end_sequence ();
300 /* This is run at the start of compiling a function. */
302 void
303 init_expr ()
305 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
307 pending_chain = 0;
308 pending_stack_adjust = 0;
309 stack_pointer_delta = 0;
310 inhibit_defer_pop = 0;
311 saveregs_value = 0;
312 apply_args_value = 0;
313 forced_labels = 0;
316 void
317 mark_expr_status (p)
318 struct expr_status *p;
320 if (p == NULL)
321 return;
323 ggc_mark_rtx (p->x_saveregs_value);
324 ggc_mark_rtx (p->x_apply_args_value);
325 ggc_mark_rtx (p->x_forced_labels);
328 void
329 free_expr_status (f)
330 struct function *f;
332 free (f->expr);
333 f->expr = NULL;
336 /* Small sanity check that the queue is empty at the end of a function. */
338 void
339 finish_expr_for_function ()
341 if (pending_chain)
342 abort ();
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
380 protect_from_queue (x, modify)
381 register rtx x;
382 int modify;
384 register RTX_CODE code = GET_CODE (x);
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
392 if (code != QUEUED)
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
405 MEM_COPY_ATTRIBUTES (new, x);
407 if (QUEUED_INSN (y))
409 register rtx temp = gen_reg_rtx (GET_MODE (new));
410 emit_insn_before (gen_move_insn (temp, new),
411 QUEUED_INSN (y));
412 return temp;
414 return new;
416 /* Otherwise, recursively protect the subexpressions of all
417 the kinds of rtx's that can contain a QUEUED. */
418 if (code == MEM)
420 rtx tem = protect_from_queue (XEXP (x, 0), 0);
421 if (tem != XEXP (x, 0))
423 x = copy_rtx (x);
424 XEXP (x, 0) = tem;
427 else if (code == PLUS || code == MULT)
429 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
430 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
431 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
433 x = copy_rtx (x);
434 XEXP (x, 0) = new0;
435 XEXP (x, 1) = new1;
438 return x;
440 /* If the increment has not happened, use the variable itself. */
441 if (QUEUED_INSN (x) == 0)
442 return QUEUED_VAR (x);
443 /* If the increment has happened and a pre-increment copy exists,
444 use that copy. */
445 if (QUEUED_COPY (x) != 0)
446 return QUEUED_COPY (x);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
451 QUEUED_INSN (x));
452 return QUEUED_COPY (x);
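/* Illustrative, hypothetical usage sketch (not part of the original file)
   of the contract described above: an rtx that may contain a QUEUED is
   protected immediately before it is put into an insn, and the protected
   value is not reused after the queue is flushed.  */
#if 0
static void
example_protect_from_queue_use (var, target)
     rtx var, target;
{
  rtx safe = protect_from_queue (var, 0);	/* read access only */

  emit_move_insn (target, safe);	/* use the protected value at once */
  emit_queue ();			/* flush pending increments */

  /* SAFE must not be reused here; protect VAR again if it is needed
     in another insn.  */
}
#endif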
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
461 queued_subexp_p (x)
462 rtx x;
464 register enum rtx_code code = GET_CODE (x);
465 switch (code)
467 case QUEUED:
468 return 1;
469 case MEM:
470 return queued_subexp_p (XEXP (x, 0));
471 case MULT:
472 case PLUS:
473 case MINUS:
474 return (queued_subexp_p (XEXP (x, 0))
475 || queued_subexp_p (XEXP (x, 1)));
476 default:
477 return 0;
481 /* Perform all the pending incrementations. */
483 void
484 emit_queue ()
486 register rtx p;
487 while ((p = pending_chain))
489 rtx body = QUEUED_BODY (p);
491 if (GET_CODE (body) == SEQUENCE)
493 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
494 emit_insn (QUEUED_BODY (p));
496 else
497 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
498 pending_chain = QUEUED_NEXT (p);
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
507 void
508 convert_move (to, from, unsignedp)
509 register rtx to, from;
510 int unsignedp;
512 enum machine_mode to_mode = GET_MODE (to);
513 enum machine_mode from_mode = GET_MODE (from);
514 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
515 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
516 enum insn_code code;
517 rtx libcall;
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
522 to = protect_from_queue (to, 1);
523 from = protect_from_queue (from, 0);
525 if (to_real != from_real)
526 abort ();
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
530 TO here. */
532 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
534 >= GET_MODE_SIZE (to_mode))
535 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
536 from = gen_lowpart (to_mode, from), from_mode = to_mode;
538 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
539 abort ();
541 if (to_mode == from_mode
542 || (from_mode == VOIDmode && CONSTANT_P (from)))
544 emit_move_insn (to, from);
545 return;
548 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
550 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
551 abort ();
553 if (VECTOR_MODE_P (to_mode))
554 from = gen_rtx_SUBREG (to_mode, from, 0);
555 else
556 to = gen_rtx_SUBREG (from_mode, to, 0);
558 emit_move_insn (to, from);
559 return;
562 if (to_real != from_real)
563 abort ();
565 if (to_real)
567 rtx value, insns;
569 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
571 /* Try converting directly if the insn is supported. */
572 if ((code = can_extend_p (to_mode, from_mode, 0))
573 != CODE_FOR_nothing)
575 emit_unop_insn (code, to, from, UNKNOWN);
576 return;
580 #ifdef HAVE_trunchfqf2
581 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
583 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
584 return;
586 #endif
587 #ifdef HAVE_trunctqfqf2
588 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
590 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
591 return;
593 #endif
594 #ifdef HAVE_truncsfqf2
595 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
597 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
598 return;
600 #endif
601 #ifdef HAVE_truncdfqf2
602 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
604 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
605 return;
607 #endif
608 #ifdef HAVE_truncxfqf2
609 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
611 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
612 return;
614 #endif
615 #ifdef HAVE_trunctfqf2
616 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
618 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
619 return;
621 #endif
623 #ifdef HAVE_trunctqfhf2
624 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
626 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
627 return;
629 #endif
630 #ifdef HAVE_truncsfhf2
631 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
633 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
634 return;
636 #endif
637 #ifdef HAVE_truncdfhf2
638 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
640 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
641 return;
643 #endif
644 #ifdef HAVE_truncxfhf2
645 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
647 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
648 return;
650 #endif
651 #ifdef HAVE_trunctfhf2
652 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
654 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
655 return;
657 #endif
659 #ifdef HAVE_truncsftqf2
660 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
662 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
663 return;
665 #endif
666 #ifdef HAVE_truncdftqf2
667 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
669 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
670 return;
672 #endif
673 #ifdef HAVE_truncxftqf2
674 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
676 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
677 return;
679 #endif
680 #ifdef HAVE_trunctftqf2
681 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
683 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
684 return;
686 #endif
688 #ifdef HAVE_truncdfsf2
689 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
691 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
692 return;
694 #endif
695 #ifdef HAVE_truncxfsf2
696 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
698 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
699 return;
701 #endif
702 #ifdef HAVE_trunctfsf2
703 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
705 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
706 return;
708 #endif
709 #ifdef HAVE_truncxfdf2
710 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
712 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
713 return;
715 #endif
716 #ifdef HAVE_trunctfdf2
717 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
719 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
720 return;
722 #endif
724 libcall = (rtx) 0;
725 switch (from_mode)
727 case SFmode:
728 switch (to_mode)
730 case DFmode:
731 libcall = extendsfdf2_libfunc;
732 break;
734 case XFmode:
735 libcall = extendsfxf2_libfunc;
736 break;
738 case TFmode:
739 libcall = extendsftf2_libfunc;
740 break;
742 default:
743 break;
745 break;
747 case DFmode:
748 switch (to_mode)
750 case SFmode:
751 libcall = truncdfsf2_libfunc;
752 break;
754 case XFmode:
755 libcall = extenddfxf2_libfunc;
756 break;
758 case TFmode:
759 libcall = extenddftf2_libfunc;
760 break;
762 default:
763 break;
765 break;
767 case XFmode:
768 switch (to_mode)
770 case SFmode:
771 libcall = truncxfsf2_libfunc;
772 break;
774 case DFmode:
775 libcall = truncxfdf2_libfunc;
776 break;
778 default:
779 break;
781 break;
783 case TFmode:
784 switch (to_mode)
786 case SFmode:
787 libcall = trunctfsf2_libfunc;
788 break;
790 case DFmode:
791 libcall = trunctfdf2_libfunc;
792 break;
794 default:
795 break;
797 break;
799 default:
800 break;
803 if (libcall == (rtx) 0)
804 /* This conversion is not implemented yet. */
805 abort ();
807 start_sequence ();
808 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
809 1, from, from_mode);
810 insns = get_insns ();
811 end_sequence ();
812 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
813 from));
814 return;
817 /* Now both modes are integers. */
819 /* Handle expanding beyond a word. */
820 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
821 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
823 rtx insns;
824 rtx lowpart;
825 rtx fill_value;
826 rtx lowfrom;
827 int i;
828 enum machine_mode lowpart_mode;
829 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
831 /* Try converting directly if the insn is supported. */
832 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
833 != CODE_FOR_nothing)
835 /* If FROM is a SUBREG, put it into a register. Do this
836 so that we always generate the same set of insns for
837 better cse'ing; if an intermediate assignment occurred,
838 we won't be doing the operation directly on the SUBREG. */
839 if (optimize > 0 && GET_CODE (from) == SUBREG)
840 from = force_reg (from_mode, from);
841 emit_unop_insn (code, to, from, equiv_code);
842 return;
844 /* Next, try converting via full word. */
845 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
846 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
847 != CODE_FOR_nothing))
849 if (GET_CODE (to) == REG)
850 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
851 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
852 emit_unop_insn (code, to,
853 gen_lowpart (word_mode, to), equiv_code);
854 return;
857 /* No special multiword conversion insn; do it by hand. */
858 start_sequence ();
860 /* Since we will turn this into a no conflict block, we must ensure
861 that the source does not overlap the target. */
863 if (reg_overlap_mentioned_p (to, from))
864 from = force_reg (from_mode, from);
866 /* Get a copy of FROM widened to a word, if necessary. */
867 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
868 lowpart_mode = word_mode;
869 else
870 lowpart_mode = from_mode;
872 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
874 lowpart = gen_lowpart (lowpart_mode, to);
875 emit_move_insn (lowpart, lowfrom);
877 /* Compute the value to put in each remaining word. */
878 if (unsignedp)
879 fill_value = const0_rtx;
880 else
882 #ifdef HAVE_slt
883 if (HAVE_slt
884 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
885 && STORE_FLAG_VALUE == -1)
887 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
888 lowpart_mode, 0, 0);
889 fill_value = gen_reg_rtx (word_mode);
890 emit_insn (gen_slt (fill_value));
892 else
893 #endif
895 fill_value
896 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
897 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
898 NULL_RTX, 0);
899 fill_value = convert_to_mode (word_mode, fill_value, 1);
903 /* Fill the remaining words. */
904 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
906 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
907 rtx subword = operand_subword (to, index, 1, to_mode);
909 if (subword == 0)
910 abort ();
912 if (fill_value != subword)
913 emit_move_insn (subword, fill_value);
916 insns = get_insns ();
917 end_sequence ();
919 emit_no_conflict_block (insns, to, from, NULL_RTX,
920 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
921 return;
924 /* Truncating multi-word to a word or less. */
925 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
926 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
928 if (!((GET_CODE (from) == MEM
929 && ! MEM_VOLATILE_P (from)
930 && direct_load[(int) to_mode]
931 && ! mode_dependent_address_p (XEXP (from, 0)))
932 || GET_CODE (from) == REG
933 || GET_CODE (from) == SUBREG))
934 from = force_reg (from_mode, from);
935 convert_move (to, gen_lowpart (word_mode, from), 0);
936 return;
939 /* Handle pointer conversion. */ /* SPEE 900220. */
940 if (to_mode == PQImode)
942 if (from_mode != QImode)
943 from = convert_to_mode (QImode, from, unsignedp);
945 #ifdef HAVE_truncqipqi2
946 if (HAVE_truncqipqi2)
948 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
949 return;
951 #endif /* HAVE_truncqipqi2 */
952 abort ();
955 if (from_mode == PQImode)
957 if (to_mode != QImode)
959 from = convert_to_mode (QImode, from, unsignedp);
960 from_mode = QImode;
962 else
964 #ifdef HAVE_extendpqiqi2
965 if (HAVE_extendpqiqi2)
967 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
968 return;
970 #endif /* HAVE_extendpqiqi2 */
971 abort ();
975 if (to_mode == PSImode)
977 if (from_mode != SImode)
978 from = convert_to_mode (SImode, from, unsignedp);
980 #ifdef HAVE_truncsipsi2
981 if (HAVE_truncsipsi2)
983 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
984 return;
986 #endif /* HAVE_truncsipsi2 */
987 abort ();
990 if (from_mode == PSImode)
992 if (to_mode != SImode)
994 from = convert_to_mode (SImode, from, unsignedp);
995 from_mode = SImode;
997 else
999 #ifdef HAVE_extendpsisi2
1000 if (! unsignedp && HAVE_extendpsisi2)
1002 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1003 return;
1005 #endif /* HAVE_extendpsisi2 */
1006 #ifdef HAVE_zero_extendpsisi2
1007 if (unsignedp && HAVE_zero_extendpsisi2)
1009 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1010 return;
1012 #endif /* HAVE_zero_extendpsisi2 */
1013 abort ();
1017 if (to_mode == PDImode)
1019 if (from_mode != DImode)
1020 from = convert_to_mode (DImode, from, unsignedp);
1022 #ifdef HAVE_truncdipdi2
1023 if (HAVE_truncdipdi2)
1025 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1026 return;
1028 #endif /* HAVE_truncdipdi2 */
1029 abort ();
1032 if (from_mode == PDImode)
1034 if (to_mode != DImode)
1036 from = convert_to_mode (DImode, from, unsignedp);
1037 from_mode = DImode;
1039 else
1041 #ifdef HAVE_extendpdidi2
1042 if (HAVE_extendpdidi2)
1044 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1045 return;
1047 #endif /* HAVE_extendpdidi2 */
1048 abort ();
1052 /* Now follow all the conversions between integers
1053 no more than a word long. */
1055 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1056 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1057 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1058 GET_MODE_BITSIZE (from_mode)))
1060 if (!((GET_CODE (from) == MEM
1061 && ! MEM_VOLATILE_P (from)
1062 && direct_load[(int) to_mode]
1063 && ! mode_dependent_address_p (XEXP (from, 0)))
1064 || GET_CODE (from) == REG
1065 || GET_CODE (from) == SUBREG))
1066 from = force_reg (from_mode, from);
1067 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1068 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1069 from = copy_to_reg (from);
1070 emit_move_insn (to, gen_lowpart (to_mode, from));
1071 return;
1074 /* Handle extension. */
1075 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1077 /* Convert directly if that works. */
1078 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1079 != CODE_FOR_nothing)
1081 emit_unop_insn (code, to, from, equiv_code);
1082 return;
1084 else
1086 enum machine_mode intermediate;
1087 rtx tmp;
1088 tree shift_amount;
1090 /* Search for a mode to convert via. */
1091 for (intermediate = from_mode; intermediate != VOIDmode;
1092 intermediate = GET_MODE_WIDER_MODE (intermediate))
1093 if (((can_extend_p (to_mode, intermediate, unsignedp)
1094 != CODE_FOR_nothing)
1095 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1096 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1097 GET_MODE_BITSIZE (intermediate))))
1098 && (can_extend_p (intermediate, from_mode, unsignedp)
1099 != CODE_FOR_nothing))
1101 convert_move (to, convert_to_mode (intermediate, from,
1102 unsignedp), unsignedp);
1103 return;
1106 /* No suitable intermediate mode.
1107 Generate what we need with shifts. */
1108 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1109 - GET_MODE_BITSIZE (from_mode), 0);
1110 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1111 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1112 to, unsignedp);
1113 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1114 to, unsignedp);
1115 if (tmp != to)
1116 emit_move_insn (to, tmp);
1117 return;
1121 /* Support special truncate insns for certain modes. */
1123 if (from_mode == DImode && to_mode == SImode)
1125 #ifdef HAVE_truncdisi2
1126 if (HAVE_truncdisi2)
1128 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1129 return;
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1136 if (from_mode == DImode && to_mode == HImode)
1138 #ifdef HAVE_truncdihi2
1139 if (HAVE_truncdihi2)
1141 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1142 return;
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1149 if (from_mode == DImode && to_mode == QImode)
1151 #ifdef HAVE_truncdiqi2
1152 if (HAVE_truncdiqi2)
1154 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1155 return;
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1162 if (from_mode == SImode && to_mode == HImode)
1164 #ifdef HAVE_truncsihi2
1165 if (HAVE_truncsihi2)
1167 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1168 return;
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1175 if (from_mode == SImode && to_mode == QImode)
1177 #ifdef HAVE_truncsiqi2
1178 if (HAVE_truncsiqi2)
1180 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1181 return;
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1188 if (from_mode == HImode && to_mode == QImode)
1190 #ifdef HAVE_trunchiqi2
1191 if (HAVE_trunchiqi2)
1193 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1194 return;
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1201 if (from_mode == TImode && to_mode == DImode)
1203 #ifdef HAVE_trunctidi2
1204 if (HAVE_trunctidi2)
1206 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1207 return;
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1214 if (from_mode == TImode && to_mode == SImode)
1216 #ifdef HAVE_trunctisi2
1217 if (HAVE_trunctisi2)
1219 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1220 return;
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1227 if (from_mode == TImode && to_mode == HImode)
1229 #ifdef HAVE_trunctihi2
1230 if (HAVE_trunctihi2)
1232 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1233 return;
1235 #endif
1236 convert_move (to, force_reg (from_mode, from), unsignedp);
1237 return;
1240 if (from_mode == TImode && to_mode == QImode)
1242 #ifdef HAVE_trunctiqi2
1243 if (HAVE_trunctiqi2)
1245 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1246 return;
1248 #endif
1249 convert_move (to, force_reg (from_mode, from), unsignedp);
1250 return;
1253 /* Handle truncation of volatile memrefs, and so on;
1254 the things that couldn't be truncated directly,
1255 and for which there was no special instruction. */
1256 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1258 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1259 emit_move_insn (to, temp);
1260 return;
1263 /* Mode combination is not recognized. */
1264 abort ();
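/* Illustrative, hypothetical caller of convert_move (not part of the
   original file): widening a QImode pseudo into a fresh SImode pseudo
   with zero-extension (UNSIGNEDP == 1).  */
#if 0
static rtx
example_widen_qimode (qi_reg)
     rtx qi_reg;			/* assumed to have mode QImode */
{
  rtx si_reg = gen_reg_rtx (SImode);

  convert_move (si_reg, qi_reg, 1);	/* 1 => treat QI_REG as unsigned */
  return si_reg;
}
#endif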
1267 /* Return an rtx for a value that would result
1268 from converting X to mode MODE.
1269 Both X and MODE may be floating, or both integer.
1270 UNSIGNEDP is nonzero if X is an unsigned value.
1271 This can be done by referring to a part of X in place
1272 or by copying to a new temporary with conversion.
1274 This function *must not* call protect_from_queue
1275 except when putting X into an insn (in which case convert_move does it). */
1278 convert_to_mode (mode, x, unsignedp)
1279 enum machine_mode mode;
1280 rtx x;
1281 int unsignedp;
1283 return convert_modes (mode, VOIDmode, x, unsignedp);
1286 /* Return an rtx for a value that would result
1287 from converting X from mode OLDMODE to mode MODE.
1288 Both modes may be floating, or both integer.
1289 UNSIGNEDP is nonzero if X is an unsigned value.
1291 This can be done by referring to a part of X in place
1292 or by copying to a new temporary with conversion.
1294 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1296 This function *must not* call protect_from_queue
1297 except when putting X into an insn (in which case convert_move does it). */
1300 convert_modes (mode, oldmode, x, unsignedp)
1301 enum machine_mode mode, oldmode;
1302 rtx x;
1303 int unsignedp;
1305 register rtx temp;
1307 /* If FROM is a SUBREG that indicates that we have already done at least
1308 the required extension, strip it. */
1310 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1311 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1312 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1313 x = gen_lowpart (mode, x);
1315 if (GET_MODE (x) != VOIDmode)
1316 oldmode = GET_MODE (x);
1318 if (mode == oldmode)
1319 return x;
1321 /* There is one case that we must handle specially: If we are converting
1322 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1323 we are to interpret the constant as unsigned, gen_lowpart will do
1324 the wrong thing if the constant appears negative. What we want to do is
1325 make the high-order word of the constant zero, not all ones. */
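/* Worked example: with a 32-bit HOST_WIDE_INT, converting (const_int -1)
   taken as an unsigned SImode value to DImode should yield the constant
   0x00000000ffffffff; a plain gen_lowpart would instead produce all ones.  */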
1327 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1328 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1329 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1331 HOST_WIDE_INT val = INTVAL (x);
1333 if (oldmode != VOIDmode
1334 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1336 int width = GET_MODE_BITSIZE (oldmode);
1338 /* We need to zero extend VAL. */
1339 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1342 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1345 /* We can do this with a gen_lowpart if both desired and current modes
1346 are integer, and this is either a constant integer, a register, or a
1347 non-volatile MEM. Except for the constant case where MODE is no
1348 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1350 if ((GET_CODE (x) == CONST_INT
1351 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1352 || (GET_MODE_CLASS (mode) == MODE_INT
1353 && GET_MODE_CLASS (oldmode) == MODE_INT
1354 && (GET_CODE (x) == CONST_DOUBLE
1355 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1356 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1357 && direct_load[(int) mode])
1358 || (GET_CODE (x) == REG
1359 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1360 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1362 /* ?? If we don't know OLDMODE, we have to assume here that
1363 X does not need sign- or zero-extension. This may not be
1364 the case, but it's the best we can do. */
1365 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1366 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1368 HOST_WIDE_INT val = INTVAL (x);
1369 int width = GET_MODE_BITSIZE (oldmode);
1371 /* We must sign or zero-extend in this case. Start by
1372 zero-extending, then sign extend if we need to. */
1373 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1374 if (! unsignedp
1375 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1376 val |= (HOST_WIDE_INT) (-1) << width;
1378 return GEN_INT (val);
1381 return gen_lowpart (mode, x);
1384 temp = gen_reg_rtx (mode);
1385 convert_move (temp, x, unsignedp);
1386 return temp;
1389 /* This macro is used to determine what the largest unit size that
1390 move_by_pieces can use is. */
1392 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1393 move efficiently, as opposed to MOVE_MAX which is the maximum
1394 number of bytes we can move with a single instruction. */
1396 #ifndef MOVE_MAX_PIECES
1397 #define MOVE_MAX_PIECES MOVE_MAX
1398 #endif
1400 /* Generate several move instructions to copy LEN bytes
1401 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1402 The caller must pass FROM and TO
1403 through protect_from_queue before calling.
1404 ALIGN is maximum alignment we can assume. */
1406 void
1407 move_by_pieces (to, from, len, align)
1408 rtx to, from;
1409 unsigned HOST_WIDE_INT len;
1410 unsigned int align;
1412 struct move_by_pieces data;
1413 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1414 unsigned int max_size = MOVE_MAX_PIECES + 1;
1415 enum machine_mode mode = VOIDmode, tmode;
1416 enum insn_code icode;
1418 data.offset = 0;
1419 data.to_addr = to_addr;
1420 data.from_addr = from_addr;
1421 data.to = to;
1422 data.from = from;
1423 data.autinc_to
1424 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1425 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1426 data.autinc_from
1427 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1428 || GET_CODE (from_addr) == POST_INC
1429 || GET_CODE (from_addr) == POST_DEC);
1431 data.explicit_inc_from = 0;
1432 data.explicit_inc_to = 0;
1433 data.reverse
1434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1435 if (data.reverse) data.offset = len;
1436 data.len = len;
1438 /* If copying requires more than two move insns,
1439 copy addresses to registers (to make displacements shorter)
1440 and use post-increment if available. */
1441 if (!(data.autinc_from && data.autinc_to)
1442 && move_by_pieces_ninsns (len, align) > 2)
1444 /* Find the mode of the largest move... */
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
1448 mode = tmode;
1450 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1452 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1453 data.autinc_from = 1;
1454 data.explicit_inc_from = -1;
1456 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 data.autinc_from = 1;
1460 data.explicit_inc_from = 1;
1462 if (!data.autinc_from && CONSTANT_P (from_addr))
1463 data.from_addr = copy_addr_to_reg (from_addr);
1464 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1466 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1467 data.autinc_to = 1;
1468 data.explicit_inc_to = -1;
1470 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1472 data.to_addr = copy_addr_to_reg (to_addr);
1473 data.autinc_to = 1;
1474 data.explicit_inc_to = 1;
1476 if (!data.autinc_to && CONSTANT_P (to_addr))
1477 data.to_addr = copy_addr_to_reg (to_addr);
1480 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1481 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1482 align = MOVE_MAX * BITS_PER_UNIT;
1484 /* First move what we can in the largest integer mode, then go to
1485 successively smaller modes. */
1487 while (max_size > 1)
1489 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1490 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1491 if (GET_MODE_SIZE (tmode) < max_size)
1492 mode = tmode;
1494 if (mode == VOIDmode)
1495 break;
1497 icode = mov_optab->handlers[(int) mode].insn_code;
1498 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1499 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1501 max_size = GET_MODE_SIZE (mode);
1504 /* The code above should have handled everything. */
1505 if (data.len > 0)
1506 abort ();
1509 /* Return number of insns required to move L bytes by pieces.
1510 ALIGN (in bytes) is maximum alignment we can assume. */
1512 static unsigned HOST_WIDE_INT
1513 move_by_pieces_ninsns (l, align)
1514 unsigned HOST_WIDE_INT l;
1515 unsigned int align;
1517 unsigned HOST_WIDE_INT n_insns = 0;
1518 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1520 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1521 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1522 align = MOVE_MAX * BITS_PER_UNIT;
1524 while (max_size > 1)
1526 enum machine_mode mode = VOIDmode, tmode;
1527 enum insn_code icode;
1529 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1530 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1531 if (GET_MODE_SIZE (tmode) < max_size)
1532 mode = tmode;
1534 if (mode == VOIDmode)
1535 break;
1537 icode = mov_optab->handlers[(int) mode].insn_code;
1538 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1539 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1541 max_size = GET_MODE_SIZE (mode);
1544 if (l)
1545 abort ();
1546 return n_insns;
1549 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1550 with move instructions for mode MODE. GENFUN is the gen_... function
1551 to make a move insn for that mode. DATA has all the other info. */
1553 static void
1554 move_by_pieces_1 (genfun, mode, data)
1555 rtx (*genfun) PARAMS ((rtx, ...));
1556 enum machine_mode mode;
1557 struct move_by_pieces *data;
1559 unsigned int size = GET_MODE_SIZE (mode);
1560 rtx to1, from1;
1562 while (data->len >= size)
1564 if (data->reverse)
1565 data->offset -= size;
1567 if (data->autinc_to)
1569 to1 = gen_rtx_MEM (mode, data->to_addr);
1570 MEM_COPY_ATTRIBUTES (to1, data->to);
1572 else
1573 to1 = change_address (data->to, mode,
1574 plus_constant (data->to_addr, data->offset));
1576 if (data->autinc_from)
1578 from1 = gen_rtx_MEM (mode, data->from_addr);
1579 MEM_COPY_ATTRIBUTES (from1, data->from);
1581 else
1582 from1 = change_address (data->from, mode,
1583 plus_constant (data->from_addr, data->offset));
1585 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1586 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1587 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1588 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1590 emit_insn ((*genfun) (to1, from1));
1592 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1593 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1594 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1595 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1597 if (! data->reverse)
1598 data->offset += size;
1600 data->len -= size;
1604 /* Emit code to move a block Y to a block X.
1605 This may be done with string-move instructions,
1606 with multiple scalar move instructions, or with a library call.
1608 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1609 with mode BLKmode.
1610 SIZE is an rtx that says how long they are.
1611 ALIGN is the maximum alignment we can assume they have.
1613 Return the address of the new block, if memcpy is called and returns it,
1614 0 otherwise. */
1617 emit_block_move (x, y, size, align)
1618 rtx x, y;
1619 rtx size;
1620 unsigned int align;
1622 rtx retval = 0;
1623 #ifdef TARGET_MEM_FUNCTIONS
1624 static tree fn;
1625 tree call_expr, arg_list;
1626 #endif
1628 if (GET_MODE (x) != BLKmode)
1629 abort ();
1631 if (GET_MODE (y) != BLKmode)
1632 abort ();
1634 x = protect_from_queue (x, 1);
1635 y = protect_from_queue (y, 0);
1636 size = protect_from_queue (size, 0);
1638 if (GET_CODE (x) != MEM)
1639 abort ();
1640 if (GET_CODE (y) != MEM)
1641 abort ();
1642 if (size == 0)
1643 abort ();
1645 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1646 move_by_pieces (x, y, INTVAL (size), align);
1647 else
1649 /* Try the most limited insn first, because there's no point
1650 including more than one in the machine description unless
1651 the more limited one has some advantage. */
1653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1654 enum machine_mode mode;
1656 /* Since this is a move insn, we don't care about volatility. */
1657 volatile_ok = 1;
1659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1660 mode = GET_MODE_WIDER_MODE (mode))
1662 enum insn_code code = movstr_optab[(int) mode];
1663 insn_operand_predicate_fn pred;
1665 if (code != CODE_FOR_nothing
1666 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1667 here because if SIZE is less than the mode mask, as it is
1668 returned by the macro, it will definitely be less than the
1669 actual mode mask. */
1670 && ((GET_CODE (size) == CONST_INT
1671 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1672 <= (GET_MODE_MASK (mode) >> 1)))
1673 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1674 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1675 || (*pred) (x, BLKmode))
1676 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1677 || (*pred) (y, BLKmode))
1678 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1679 || (*pred) (opalign, VOIDmode)))
1681 rtx op2;
1682 rtx last = get_last_insn ();
1683 rtx pat;
1685 op2 = convert_to_mode (mode, size, 1);
1686 pred = insn_data[(int) code].operand[2].predicate;
1687 if (pred != 0 && ! (*pred) (op2, mode))
1688 op2 = copy_to_mode_reg (mode, op2);
1690 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1691 if (pat)
1693 emit_insn (pat);
1694 volatile_ok = 0;
1695 return 0;
1697 else
1698 delete_insns_since (last);
1702 volatile_ok = 0;
1704 /* X, Y, or SIZE may have been passed through protect_from_queue.
1706 It is unsafe to save the value generated by protect_from_queue
1707 and reuse it later. Consider what happens if emit_queue is
1708 called before the return value from protect_from_queue is used.
1710 Expansion of the CALL_EXPR below will call emit_queue before
1711 we are finished emitting RTL for argument setup. So if we are
1712 not careful we could get the wrong value for an argument.
1714 To avoid this problem we go ahead and emit code to copy X, Y &
1715 SIZE into new pseudos. We can then place those new pseudos
1716 into an RTL_EXPR and use them later, even after a call to
1717 emit_queue.
1719 Note this is not strictly needed for library calls since they
1720 do not call emit_queue before loading their arguments. However,
1721 we may need to have library calls call emit_queue in the future
1722 since failing to do so could cause problems for targets which
1723 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1724 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1725 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1727 #ifdef TARGET_MEM_FUNCTIONS
1728 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1729 #else
1730 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1731 TREE_UNSIGNED (integer_type_node));
1732 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1733 #endif
1735 #ifdef TARGET_MEM_FUNCTIONS
1736 /* It is incorrect to use the libcall calling conventions to call
1737 memcpy in this context.
1739 This could be a user call to memcpy and the user may wish to
1740 examine the return value from memcpy.
1742 For targets where libcalls and normal calls have different conventions
1743 for returning pointers, we could end up generating incorrect code.
1745 So instead of using a libcall sequence we build up a suitable
1746 CALL_EXPR and expand the call in the normal fashion. */
1747 if (fn == NULL_TREE)
1749 tree fntype;
1751 /* This was copied from except.c, I don't know if all this is
1752 necessary in this context or not. */
1753 fn = get_identifier ("memcpy");
1754 fntype = build_pointer_type (void_type_node);
1755 fntype = build_function_type (fntype, NULL_TREE);
1756 fn = build_decl (FUNCTION_DECL, fn, fntype);
1757 ggc_add_tree_root (&fn, 1);
1758 DECL_EXTERNAL (fn) = 1;
1759 TREE_PUBLIC (fn) = 1;
1760 DECL_ARTIFICIAL (fn) = 1;
1761 make_decl_rtl (fn, NULL_PTR);
1762 assemble_external (fn);
1765 /* We need to make an argument list for the function call.
1767 memcpy has three arguments, the first two are void * addresses and
1768 the last is a size_t byte count for the copy. */
1769 arg_list
1770 = build_tree_list (NULL_TREE,
1771 make_tree (build_pointer_type (void_type_node), x));
1772 TREE_CHAIN (arg_list)
1773 = build_tree_list (NULL_TREE,
1774 make_tree (build_pointer_type (void_type_node), y));
1775 TREE_CHAIN (TREE_CHAIN (arg_list))
1776 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1777 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1779 /* Now we have to build up the CALL_EXPR itself. */
1780 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1781 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1782 call_expr, arg_list, NULL_TREE);
1783 TREE_SIDE_EFFECTS (call_expr) = 1;
1785 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1786 #else
1787 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1788 VOIDmode, 3, y, Pmode, x, Pmode,
1789 convert_to_mode (TYPE_MODE (integer_type_node), size,
1790 TREE_UNSIGNED (integer_type_node)),
1791 TYPE_MODE (integer_type_node));
1792 #endif
1795 return retval;
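/* Illustrative, hypothetical caller of emit_block_move (not part of the
   original file): copying LEN constant bytes between two byte-aligned
   BLKmode blocks whose addresses are already in registers.  */
#if 0
static void
example_block_copy (dst_addr, src_addr, len)
     rtx dst_addr, src_addr;
     HOST_WIDE_INT len;
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  /* ALIGN is given in bits here; BITS_PER_UNIT is the weakest (byte)
     alignment assumption.  */
  emit_block_move (dst, src, GEN_INT (len), BITS_PER_UNIT);
}
#endif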
1798 /* Copy all or part of a value X into registers starting at REGNO.
1799 The number of registers to be filled is NREGS. */
1801 void
1802 move_block_to_reg (regno, x, nregs, mode)
1803 int regno;
1804 rtx x;
1805 int nregs;
1806 enum machine_mode mode;
1808 int i;
1809 #ifdef HAVE_load_multiple
1810 rtx pat;
1811 rtx last;
1812 #endif
1814 if (nregs == 0)
1815 return;
1817 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1818 x = validize_mem (force_const_mem (mode, x));
1820 /* See if the machine can do this with a load multiple insn. */
1821 #ifdef HAVE_load_multiple
1822 if (HAVE_load_multiple)
1824 last = get_last_insn ();
1825 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1826 GEN_INT (nregs));
1827 if (pat)
1829 emit_insn (pat);
1830 return;
1832 else
1833 delete_insns_since (last);
1835 #endif
1837 for (i = 0; i < nregs; i++)
1838 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1839 operand_subword_force (x, i, mode));
1842 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1843 The number of registers to be filled is NREGS. SIZE indicates the number
1844 of bytes in the object X. */
1846 void
1847 move_block_from_reg (regno, x, nregs, size)
1848 int regno;
1849 rtx x;
1850 int nregs;
1851 int size;
1853 int i;
1854 #ifdef HAVE_store_multiple
1855 rtx pat;
1856 rtx last;
1857 #endif
1858 enum machine_mode mode;
1860 /* If SIZE is that of a mode no bigger than a word, just use that
1861 mode's store operation. */
1862 if (size <= UNITS_PER_WORD
1863 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1865 emit_move_insn (change_address (x, mode, NULL),
1866 gen_rtx_REG (mode, regno));
1867 return;
1870 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1871 to the left before storing to memory. Note that the previous test
1872 doesn't handle all cases (e.g. SIZE == 3). */
1873 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1875 rtx tem = operand_subword (x, 0, 1, BLKmode);
1876 rtx shift;
1878 if (tem == 0)
1879 abort ();
1881 shift = expand_shift (LSHIFT_EXPR, word_mode,
1882 gen_rtx_REG (word_mode, regno),
1883 build_int_2 ((UNITS_PER_WORD - size)
1884 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1885 emit_move_insn (tem, shift);
1886 return;
1889 /* See if the machine can do this with a store multiple insn. */
1890 #ifdef HAVE_store_multiple
1891 if (HAVE_store_multiple)
1893 last = get_last_insn ();
1894 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1895 GEN_INT (nregs));
1896 if (pat)
1898 emit_insn (pat);
1899 return;
1901 else
1902 delete_insns_since (last);
1904 #endif
1906 for (i = 0; i < nregs; i++)
1908 rtx tem = operand_subword (x, i, 1, BLKmode);
1910 if (tem == 0)
1911 abort ();
1913 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1917 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1918 registers represented by a PARALLEL. SSIZE represents the total size of
1919 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1920 SRC in bits. */
1921 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1922 the balance will be in what would be the low-order memory addresses, i.e.
1923 left justified for big endian, right justified for little endian. This
1924 happens to be true for the targets currently using this support. If this
1925 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1926 would be needed. */
1928 void
1929 emit_group_load (dst, orig_src, ssize, align)
1930 rtx dst, orig_src;
1931 unsigned int align;
1932 int ssize;
1934 rtx *tmps, src;
1935 int start, i;
1937 if (GET_CODE (dst) != PARALLEL)
1938 abort ();
1940 /* Check for a NULL entry, used to indicate that the parameter goes
1941 both on the stack and in registers. */
1942 if (XEXP (XVECEXP (dst, 0, 0), 0))
1943 start = 0;
1944 else
1945 start = 1;
1947 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1949 /* If we won't be loading directly from memory, protect the real source
1950 from strange tricks we might play. */
1951 src = orig_src;
1952 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1954 if (GET_MODE (src) == VOIDmode)
1955 src = gen_reg_rtx (GET_MODE (dst));
1956 else
1957 src = gen_reg_rtx (GET_MODE (orig_src));
1958 emit_move_insn (src, orig_src);
1961 /* Process the pieces. */
1962 for (i = start; i < XVECLEN (dst, 0); i++)
1964 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1965 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1966 unsigned int bytelen = GET_MODE_SIZE (mode);
1967 int shift = 0;
1969 /* Handle trailing fragments that run over the size of the struct. */
1970 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1972 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1973 bytelen = ssize - bytepos;
1974 if (bytelen <= 0)
1975 abort ();
1978 /* Optimize the access just a bit. */
1979 if (GET_CODE (src) == MEM
1980 && align >= GET_MODE_ALIGNMENT (mode)
1981 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1982 && bytelen == GET_MODE_SIZE (mode))
1984 tmps[i] = gen_reg_rtx (mode);
1985 emit_move_insn (tmps[i],
1986 change_address (src, mode,
1987 plus_constant (XEXP (src, 0),
1988 bytepos)));
1990 else if (GET_CODE (src) == CONCAT)
1992 if (bytepos == 0
1993 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1994 tmps[i] = XEXP (src, 0);
1995 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1996 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1997 tmps[i] = XEXP (src, 1);
1998 else
1999 abort ();
2001 else if ((CONSTANT_P (src)
2002 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2003 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2004 tmps[i] = src;
2005 else
2006 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2007 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2008 mode, mode, align, ssize);
2010 if (BYTES_BIG_ENDIAN && shift)
2011 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2012 tmps[i], 0, OPTAB_WIDEN);
2015 emit_queue ();
2017 /* Copy the extracted pieces into the proper (probable) hard regs. */
2018 for (i = start; i < XVECLEN (dst, 0); i++)
2019 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
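/* Illustrative sketch (not part of the original source): a caller that
   wants a 16-byte structure loaded into two hypothetical hard registers
   0 and 1 might build the PARALLEL and call emit_group_load roughly like
   this; the register numbers, modes, sizes and alignment are made-up
   assumptions, not taken from any real target:

     rtx dst
       = gen_rtx_PARALLEL
         (BLKmode,
          gen_rtvec (2,
                     gen_rtx_EXPR_LIST (VOIDmode,
                                        gen_rtx_REG (DImode, 0),
                                        GEN_INT (0)),
                     gen_rtx_EXPR_LIST (VOIDmode,
                                        gen_rtx_REG (DImode, 1),
                                        GEN_INT (8))));
     emit_group_load (dst, src_mem, 16, 64);

   SRC_MEM is assumed to be a BLKmode MEM holding the structure, and 64 is
   its known alignment in bits.  */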
2022 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2023 registers represented by a PARALLEL. SSIZE represents the total size of
2024 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2026 void
2027 emit_group_store (orig_dst, src, ssize, align)
2028 rtx orig_dst, src;
2029 int ssize;
2030 unsigned int align;
2032 rtx *tmps, dst;
2033 int start, i;
2035 if (GET_CODE (src) != PARALLEL)
2036 abort ();
2038 /* Check for a NULL entry, used to indicate that the parameter goes
2039 both on the stack and in registers. */
2040 if (XEXP (XVECEXP (src, 0, 0), 0))
2041 start = 0;
2042 else
2043 start = 1;
2045 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2047 /* Copy the (probable) hard regs into pseudos. */
2048 for (i = start; i < XVECLEN (src, 0); i++)
2050 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2051 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2052 emit_move_insn (tmps[i], reg);
2054 emit_queue ();
2056 /* If we won't be storing directly into memory, protect the real destination
2057 from strange tricks we might play. */
2058 dst = orig_dst;
2059 if (GET_CODE (dst) == PARALLEL)
2061 rtx temp;
2063 /* We can get a PARALLEL dst if there is a conditional expression in
2064 a return statement. In that case, the dst and src are the same,
2065 so no action is necessary. */
2066 if (rtx_equal_p (dst, src))
2067 return;
2069 /* It is unclear if we can ever reach here, but we may as well handle
2070 it. Allocate a temporary, and split this into a store/load to/from
2071 the temporary. */
2073 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2074 emit_group_store (temp, src, ssize, align);
2075 emit_group_load (dst, temp, ssize, align);
2076 return;
2078 else if (GET_CODE (dst) != MEM)
2080 dst = gen_reg_rtx (GET_MODE (orig_dst));
2081 /* Make life a bit easier for combine. */
2082 emit_move_insn (dst, const0_rtx);
2085 /* Process the pieces. */
2086 for (i = start; i < XVECLEN (src, 0); i++)
2088 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2089 enum machine_mode mode = GET_MODE (tmps[i]);
2090 unsigned int bytelen = GET_MODE_SIZE (mode);
2092 /* Handle trailing fragments that run over the size of the struct. */
2093 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2095 if (BYTES_BIG_ENDIAN)
2097 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2098 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2099 tmps[i], 0, OPTAB_WIDEN);
2101 bytelen = ssize - bytepos;
2104 /* Optimize the access just a bit. */
2105 if (GET_CODE (dst) == MEM
2106 && align >= GET_MODE_ALIGNMENT (mode)
2107 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2108 && bytelen == GET_MODE_SIZE (mode))
2109 emit_move_insn (change_address (dst, mode,
2110 plus_constant (XEXP (dst, 0),
2111 bytepos)),
2112 tmps[i]);
2113 else
2114 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2115 mode, tmps[i], align, ssize);
2118 emit_queue ();
2120 /* Copy from the pseudo into the (probable) hard reg. */
2121 if (GET_CODE (dst) == REG)
2122 emit_move_insn (orig_dst, dst);
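/* Illustrative sketch (not part of the original source): the mirror of the
   emit_group_load example above.  A return value held in the same
   hypothetical two-register PARALLEL could be spilled to memory with

     emit_group_store (dst_mem, parallel_retval, 16, 64);

   where DST_MEM is assumed to be a BLKmode MEM at least 16 bytes wide and
   64 its alignment in bits.  */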
2125 /* Generate code to copy a BLKmode object of TYPE out of a
2126 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2127 is null, a stack temporary is created. TGTBLK is returned.
2129 The primary purpose of this routine is to handle functions
2130 that return BLKmode structures in registers. Some machines
2131 (the PA for example) want to return all small structures
2132 in registers regardless of the structure's alignment. */
2135 copy_blkmode_from_reg (tgtblk, srcreg, type)
2136 rtx tgtblk;
2137 rtx srcreg;
2138 tree type;
2140 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2141 rtx src = NULL, dst = NULL;
2142 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2143 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2145 if (tgtblk == 0)
2147 tgtblk = assign_temp (build_qualified_type (type,
2148 (TYPE_QUALS (type)
2149 | TYPE_QUAL_CONST)),
2150 0, 1, 1);
2151 preserve_temp_slots (tgtblk);
2154 /* This code assumes srcreg is at least a full word. If it isn't,
2155 copy it into a new pseudo which is a full word. */
2156 if (GET_MODE (srcreg) != BLKmode
2157 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2158 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2160 /* Structures whose size is not a multiple of a word are aligned
2161 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2162 machine, this means we must skip the empty high order bytes when
2163 calculating the bit offset. */
2164 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2165 big_endian_correction
2166 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
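  /* Worked example (added for clarity, not in the original source): on a
     big-endian target with 32-bit words, a 6-byte structure gives
     bytes % UNITS_PER_WORD == 2, so big_endian_correction
     = 32 - 2 * 8 = 16; the copy loop below therefore starts XBITPOS at
     bit 16, skipping the 16 unused high-order bits of the first source
     word.  */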
2168 /* Copy the structure BITSIZE bits at a time.
2170 We could probably emit more efficient code for machines which do not use
2171 strict alignment, but it doesn't seem worth the effort at the current
2172 time. */
2173 for (bitpos = 0, xbitpos = big_endian_correction;
2174 bitpos < bytes * BITS_PER_UNIT;
2175 bitpos += bitsize, xbitpos += bitsize)
2177 /* We need a new source operand each time xbitpos is on a
2178 word boundary and when xbitpos == big_endian_correction
2179 (the first time through). */
2180 if (xbitpos % BITS_PER_WORD == 0
2181 || xbitpos == big_endian_correction)
2182 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2184 /* We need a new destination operand each time bitpos is on
2185 a word boundary. */
2186 if (bitpos % BITS_PER_WORD == 0)
2187 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2189 /* Use xbitpos for the source extraction (right justified) and
2190 bitpos for the destination store (left justified). */
2191 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2192 extract_bit_field (src, bitsize,
2193 xbitpos % BITS_PER_WORD, 1,
2194 NULL_RTX, word_mode, word_mode,
2195 bitsize, BITS_PER_WORD),
2196 bitsize, BITS_PER_WORD);
2199 return tgtblk;
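/* Illustrative sketch (not part of the original source): a typical use of
   copy_blkmode_from_reg is expanding a call to a function that returns a
   small BLKmode structure in a register.  Assuming RESULT_REG is the hard
   return register and TYPE is the structure's tree type (both names are
   hypothetical here), a caller might write, roughly:

     rtx blk = copy_blkmode_from_reg (NULL_RTX, result_reg, type);

   Passing 0 as TGTBLK makes the function allocate a stack temporary
   itself and return it.  */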
2202 /* Add a USE expression for REG to the (possibly empty) list pointed
2203 to by CALL_FUSAGE. REG must denote a hard register. */
2205 void
2206 use_reg (call_fusage, reg)
2207 rtx *call_fusage, reg;
2209 if (GET_CODE (reg) != REG
2210 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2211 abort ();
2213 *call_fusage
2214 = gen_rtx_EXPR_LIST (VOIDmode,
2215 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2218 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2219 starting at REGNO. All of these registers must be hard registers. */
2221 void
2222 use_regs (call_fusage, regno, nregs)
2223 rtx *call_fusage;
2224 int regno;
2225 int nregs;
2227 int i;
2229 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2230 abort ();
2232 for (i = 0; i < nregs; i++)
2233 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2236 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2237 PARALLEL REGS. This is for calls that pass values in multiple
2238 non-contiguous locations. The Irix 6 ABI has examples of this. */
2240 void
2241 use_group_regs (call_fusage, regs)
2242 rtx *call_fusage;
2243 rtx regs;
2245 int i;
2247 for (i = 0; i < XVECLEN (regs, 0); i++)
2249 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2251 /* A NULL entry means the parameter goes both on the stack and in
2252 registers. This can also be a MEM for targets that pass values
2253 partially on the stack and partially in registers. */
2254 if (reg != 0 && GET_CODE (reg) == REG)
2255 use_reg (call_fusage, reg);
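/* Illustrative sketch (not part of the original source): call expansion
   code builds up CALL_FUSAGE by chaining these helpers.  For example, to
   record that a call reads two consecutive argument registers starting at
   hard register 4 (an arbitrary number chosen for illustration):

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);

   The resulting list of USE expressions is then attached to the call insn
   (its CALL_INSN_FUNCTION_USAGE) so that later passes know those
   registers are live at the call.  */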
2261 can_store_by_pieces (len, constfun, constfundata, align)
2262 unsigned HOST_WIDE_INT len;
2263 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2264 PTR constfundata;
2265 unsigned int align;
2267 unsigned HOST_WIDE_INT max_size, l;
2268 HOST_WIDE_INT offset = 0;
2269 enum machine_mode mode, tmode;
2270 enum insn_code icode;
2271 int reverse;
2272 rtx cst;
2274 if (! MOVE_BY_PIECES_P (len, align))
2275 return 0;
2277 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2278 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2279 align = MOVE_MAX * BITS_PER_UNIT;
2281 /* We would first store what we can in the largest integer mode, then go to
2282 successively smaller modes. */
2284 for (reverse = 0;
2285 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2286 reverse++)
2288 l = len;
2289 mode = VOIDmode;
2290 max_size = MOVE_MAX_PIECES + 1;
2291 while (max_size > 1)
2293 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2294 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2295 if (GET_MODE_SIZE (tmode) < max_size)
2296 mode = tmode;
2298 if (mode == VOIDmode)
2299 break;
2301 icode = mov_optab->handlers[(int) mode].insn_code;
2302 if (icode != CODE_FOR_nothing
2303 && align >= GET_MODE_ALIGNMENT (mode))
2305 unsigned int size = GET_MODE_SIZE (mode);
2307 while (l >= size)
2309 if (reverse)
2310 offset -= size;
2312 cst = (*constfun) (constfundata, offset, mode);
2313 if (!LEGITIMATE_CONSTANT_P (cst))
2314 return 0;
2316 if (!reverse)
2317 offset += size;
2319 l -= size;
2323 max_size = GET_MODE_SIZE (mode);
2326 /* The code above should have handled everything. */
2327 if (l != 0)
2328 abort ();
2331 return 1;
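/* Illustrative sketch (not part of the original source): a CONSTFUN
   callback maps a byte offset and a mode to a constant rtx.  A
   hypothetical callback that replicates a single byte value, much as a
   constant-valued memset would, might look like this (the function name
   and replication scheme are assumptions, not compiler code):

     static rtx
     fill_byte_constfun (data, offset, mode)
          PTR data;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode;
     {
       HOST_WIDE_INT byte = *(HOST_WIDE_INT *) data & 0xff;
       HOST_WIDE_INT val = 0;
       unsigned int i;

       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         val = (val << 8) | byte;
       return GEN_INT (trunc_int_for_mode (val, mode));
     }

   can_store_by_pieces (len, fill_byte_constfun, &byte_value, align) then
   reports whether store_by_pieces with the same arguments would
   succeed.  */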
2334 /* Generate several move instructions to store LEN bytes generated by
2335 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2336 pointer which will be passed as argument in every CONSTFUN call.
2337 ALIGN is maximum alignment we can assume. */
2339 void
2340 store_by_pieces (to, len, constfun, constfundata, align)
2341 rtx to;
2342 unsigned HOST_WIDE_INT len;
2343 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2344 PTR constfundata;
2345 unsigned int align;
2347 struct store_by_pieces data;
2349 if (! MOVE_BY_PIECES_P (len, align))
2350 abort ();
2351 to = protect_from_queue (to, 1);
2352 data.constfun = constfun;
2353 data.constfundata = constfundata;
2354 data.len = len;
2355 data.to = to;
2356 store_by_pieces_1 (&data, align);
2359 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2360 rtx with BLKmode). The caller must pass TO through protect_from_queue
2361 before calling. ALIGN is maximum alignment we can assume. */
2363 static void
2364 clear_by_pieces (to, len, align)
2365 rtx to;
2366 unsigned HOST_WIDE_INT len;
2367 unsigned int align;
2369 struct store_by_pieces data;
2371 data.constfun = clear_by_pieces_1;
2372 data.constfundata = NULL_PTR;
2373 data.len = len;
2374 data.to = to;
2375 store_by_pieces_1 (&data, align);
2378 /* Callback routine for clear_by_pieces.
2379 Return const0_rtx unconditionally. */
2381 static rtx
2382 clear_by_pieces_1 (data, offset, mode)
2383 PTR data ATTRIBUTE_UNUSED;
2384 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2385 enum machine_mode mode ATTRIBUTE_UNUSED;
2387 return const0_rtx;
2390 /* Subroutine of clear_by_pieces and store_by_pieces.
2391 Generate several move instructions to store LEN bytes of block TO. (A MEM
2392 rtx with BLKmode). The caller must pass TO through protect_from_queue
2393 before calling. ALIGN is maximum alignment we can assume. */
2395 static void
2396 store_by_pieces_1 (data, align)
2397 struct store_by_pieces *data;
2398 unsigned int align;
2400 rtx to_addr = XEXP (data->to, 0);
2401 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2402 enum machine_mode mode = VOIDmode, tmode;
2403 enum insn_code icode;
2405 data->offset = 0;
2406 data->to_addr = to_addr;
2407 data->autinc_to
2408 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2409 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2411 data->explicit_inc_to = 0;
2412 data->reverse
2413 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2414 if (data->reverse)
2415 data->offset = data->len;
2417 /* If storing requires more than two move insns,
2418 copy addresses to registers (to make displacements shorter)
2419 and use post-increment if available. */
2420 if (!data->autinc_to
2421 && move_by_pieces_ninsns (data->len, align) > 2)
2423 /* Determine the main mode we'll be using. */
2424 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2425 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2426 if (GET_MODE_SIZE (tmode) < max_size)
2427 mode = tmode;
2429 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2431 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2432 data->autinc_to = 1;
2433 data->explicit_inc_to = -1;
2436 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2437 && ! data->autinc_to)
2439 data->to_addr = copy_addr_to_reg (to_addr);
2440 data->autinc_to = 1;
2441 data->explicit_inc_to = 1;
2444 if ( !data->autinc_to && CONSTANT_P (to_addr))
2445 data->to_addr = copy_addr_to_reg (to_addr);
2448 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2449 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2450 align = MOVE_MAX * BITS_PER_UNIT;
2452 /* First store what we can in the largest integer mode, then go to
2453 successively smaller modes. */
2455 while (max_size > 1)
2457 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2458 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2459 if (GET_MODE_SIZE (tmode) < max_size)
2460 mode = tmode;
2462 if (mode == VOIDmode)
2463 break;
2465 icode = mov_optab->handlers[(int) mode].insn_code;
2466 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2467 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2469 max_size = GET_MODE_SIZE (mode);
2472 /* The code above should have handled everything. */
2473 if (data->len != 0)
2474 abort ();
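/* Worked example (added for clarity, not in the original source): on a
   32-bit target with MOVE_MAX_PIECES == 4, storing 7 bytes proceeds as
   follows.  The first pass of the loop above selects SImode and emits one
   4-byte store; max_size then drops to 4, so the next pass selects HImode
   and emits a 2-byte store; finally QImode covers the last byte.  The
   exact modes used depend on the target's move patterns and on the
   alignment that reaches this function.  */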
2477 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2478 with move instructions for mode MODE. GENFUN is the gen_... function
2479 to make a move insn for that mode. DATA has all the other info. */
2481 static void
2482 store_by_pieces_2 (genfun, mode, data)
2483 rtx (*genfun) PARAMS ((rtx, ...));
2484 enum machine_mode mode;
2485 struct store_by_pieces *data;
2487 unsigned int size = GET_MODE_SIZE (mode);
2488 rtx to1, cst;
2490 while (data->len >= size)
2492 if (data->reverse)
2493 data->offset -= size;
2495 if (data->autinc_to)
2497 to1 = gen_rtx_MEM (mode, data->to_addr);
2498 MEM_COPY_ATTRIBUTES (to1, data->to);
2500 else
2501 to1 = change_address (data->to, mode,
2502 plus_constant (data->to_addr, data->offset));
2504 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2505 emit_insn (gen_add2_insn (data->to_addr,
2506 GEN_INT (-(HOST_WIDE_INT) size)));
2508 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2509 emit_insn ((*genfun) (to1, cst));
2511 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2512 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2514 if (! data->reverse)
2515 data->offset += size;
2517 data->len -= size;
2521 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2522 its length in bytes and ALIGN is the maximum alignment we can assume.
2524 If we call a function (such as memset) to do the work, return its return value. */
2527 clear_storage (object, size, align)
2528 rtx object;
2529 rtx size;
2530 unsigned int align;
2532 #ifdef TARGET_MEM_FUNCTIONS
2533 static tree fn;
2534 tree call_expr, arg_list;
2535 #endif
2536 rtx retval = 0;
2538 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2539 just move a zero. Otherwise, do this a piece at a time. */
2540 if (GET_MODE (object) != BLKmode
2541 && GET_CODE (size) == CONST_INT
2542 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2543 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2544 else
2546 object = protect_from_queue (object, 1);
2547 size = protect_from_queue (size, 0);
2549 if (GET_CODE (size) == CONST_INT
2550 && MOVE_BY_PIECES_P (INTVAL (size), align))
2551 clear_by_pieces (object, INTVAL (size), align);
2552 else
2554 /* Try the most limited insn first, because there's no point
2555 including more than one in the machine description unless
2556 the more limited one has some advantage. */
2558 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2559 enum machine_mode mode;
2561 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2562 mode = GET_MODE_WIDER_MODE (mode))
2564 enum insn_code code = clrstr_optab[(int) mode];
2565 insn_operand_predicate_fn pred;
2567 if (code != CODE_FOR_nothing
2568 /* We don't need MODE to be narrower than
2569 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2570 the mode mask, as it is returned by the macro, it will
2571 definitely be less than the actual mode mask. */
2572 && ((GET_CODE (size) == CONST_INT
2573 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2574 <= (GET_MODE_MASK (mode) >> 1)))
2575 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2576 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2577 || (*pred) (object, BLKmode))
2578 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2579 || (*pred) (opalign, VOIDmode)))
2581 rtx op1;
2582 rtx last = get_last_insn ();
2583 rtx pat;
2585 op1 = convert_to_mode (mode, size, 1);
2586 pred = insn_data[(int) code].operand[1].predicate;
2587 if (pred != 0 && ! (*pred) (op1, mode))
2588 op1 = copy_to_mode_reg (mode, op1);
2590 pat = GEN_FCN ((int) code) (object, op1, opalign);
2591 if (pat)
2593 emit_insn (pat);
2594 return 0;
2596 else
2597 delete_insns_since (last);
2601 /* OBJECT or SIZE may have been passed through protect_from_queue.
2603 It is unsafe to save the value generated by protect_from_queue
2604 and reuse it later. Consider what happens if emit_queue is
2605 called before the return value from protect_from_queue is used.
2607 Expansion of the CALL_EXPR below will call emit_queue before
2608 we are finished emitting RTL for argument setup. So if we are
2609 not careful we could get the wrong value for an argument.
2611 To avoid this problem we go ahead and emit code to copy OBJECT
2612 and SIZE into new pseudos. We can then place those new pseudos
2613 into an RTL_EXPR and use them later, even after a call to
2614 emit_queue.
2616 Note this is not strictly needed for library calls since they
2617 do not call emit_queue before loading their arguments. However,
2618 we may need to have library calls call emit_queue in the future
2619 since failing to do so could cause problems for targets which
2620 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2621 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2623 #ifdef TARGET_MEM_FUNCTIONS
2624 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2625 #else
2626 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2627 TREE_UNSIGNED (integer_type_node));
2628 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2629 #endif
2631 #ifdef TARGET_MEM_FUNCTIONS
2632 /* It is incorrect to use the libcall calling conventions to call
2633 memset in this context.
2635 This could be a user call to memset and the user may wish to
2636 examine the return value from memset.
2638 For targets where libcalls and normal calls have different
2639 conventions for returning pointers, we could end up generating
2640 incorrect code.
2642 So instead of using a libcall sequence we build up a suitable
2643 CALL_EXPR and expand the call in the normal fashion. */
2644 if (fn == NULL_TREE)
2646 tree fntype;
2648 /* This was copied from except.c; I don't know whether all of this is
2649 necessary in this context or not. */
2650 fn = get_identifier ("memset");
2651 fntype = build_pointer_type (void_type_node);
2652 fntype = build_function_type (fntype, NULL_TREE);
2653 fn = build_decl (FUNCTION_DECL, fn, fntype);
2654 ggc_add_tree_root (&fn, 1);
2655 DECL_EXTERNAL (fn) = 1;
2656 TREE_PUBLIC (fn) = 1;
2657 DECL_ARTIFICIAL (fn) = 1;
2658 make_decl_rtl (fn, NULL_PTR);
2659 assemble_external (fn);
2662 /* We need to make an argument list for the function call.
2664 memset has three arguments: the first is a void * address, the
2665 second an integer with the initialization value, and the last a
2666 size_t byte count for the fill. */
2667 arg_list
2668 = build_tree_list (NULL_TREE,
2669 make_tree (build_pointer_type (void_type_node),
2670 object));
2671 TREE_CHAIN (arg_list)
2672 = build_tree_list (NULL_TREE,
2673 make_tree (integer_type_node, const0_rtx));
2674 TREE_CHAIN (TREE_CHAIN (arg_list))
2675 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2676 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2678 /* Now we have to build up the CALL_EXPR itself. */
2679 call_expr = build1 (ADDR_EXPR,
2680 build_pointer_type (TREE_TYPE (fn)), fn);
2681 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2682 call_expr, arg_list, NULL_TREE);
2683 TREE_SIDE_EFFECTS (call_expr) = 1;
2685 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2686 #else
2687 emit_library_call (bzero_libfunc, LCT_NORMAL,
2688 VOIDmode, 2, object, Pmode, size,
2689 TYPE_MODE (integer_type_node));
2690 #endif
2694 return retval;
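/* Illustrative sketch (not part of the original source): expanding an
   aggregate initialization such as `struct S s = {0};' reaches this
   function with something along these lines, where S_RTX is the BLKmode
   MEM for the variable and TYPE its tree type (both hypothetical names):

     clear_storage (s_rtx, GEN_INT (int_size_in_bytes (type)),
                    TYPE_ALIGN (type));

   Depending on the size, the target's clrstr patterns and
   MOVE_BY_PIECES_P, this becomes inline stores, a clrstr insn, or the
   memset/bzero call emitted above.  */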
2697 /* Generate code to copy Y into X.
2698 Both Y and X must have the same mode, except that
2699 Y can be a constant with VOIDmode.
2700 This mode cannot be BLKmode; use emit_block_move for that.
2702 Return the last instruction emitted. */
2705 emit_move_insn (x, y)
2706 rtx x, y;
2708 enum machine_mode mode = GET_MODE (x);
2710 x = protect_from_queue (x, 1);
2711 y = protect_from_queue (y, 0);
2713 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2714 abort ();
2716 /* Never force constant_p_rtx to memory. */
2717 if (GET_CODE (y) == CONSTANT_P_RTX)
2719 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2720 y = force_const_mem (mode, y);
2722 /* If X or Y are memory references, verify that their addresses are valid
2723 for the machine. */
2724 if (GET_CODE (x) == MEM
2725 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2726 && ! push_operand (x, GET_MODE (x)))
2727 || (flag_force_addr
2728 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2729 x = change_address (x, VOIDmode, XEXP (x, 0));
2731 if (GET_CODE (y) == MEM
2732 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2733 || (flag_force_addr
2734 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2735 y = change_address (y, VOIDmode, XEXP (y, 0));
2737 if (mode == BLKmode)
2738 abort ();
2740 return emit_move_insn_1 (x, y);
2743 /* Low level part of emit_move_insn.
2744 Called just like emit_move_insn, but assumes X and Y
2745 are basically valid. */
2748 emit_move_insn_1 (x, y)
2749 rtx x, y;
2751 enum machine_mode mode = GET_MODE (x);
2752 enum machine_mode submode;
2753 enum mode_class class = GET_MODE_CLASS (mode);
2754 unsigned int i;
2756 if (mode >= MAX_MACHINE_MODE)
2757 abort ();
2759 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2760 return
2761 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2763 /* Expand complex moves by moving real part and imag part, if possible. */
2764 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2765 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2766 * BITS_PER_UNIT),
2767 (class == MODE_COMPLEX_INT
2768 ? MODE_INT : MODE_FLOAT),
2770 && (mov_optab->handlers[(int) submode].insn_code
2771 != CODE_FOR_nothing))
2773 /* Don't split destination if it is a stack push. */
2774 int stack = push_operand (x, GET_MODE (x));
2776 /* If this is a stack push, push the highpart first, so it
2777 will be in the argument order.
2779 In that case, change_address is used only to convert
2780 the mode, not to change the address. */
2781 if (stack)
2783 /* Note that the real part always precedes the imag part in memory
2784 regardless of the machine's endianness. */
2785 #ifdef STACK_GROWS_DOWNWARD
2786 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2787 (gen_rtx_MEM (submode, XEXP (x, 0)),
2788 gen_imagpart (submode, y)));
2789 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2790 (gen_rtx_MEM (submode, XEXP (x, 0)),
2791 gen_realpart (submode, y)));
2792 #else
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2794 (gen_rtx_MEM (submode, XEXP (x, 0)),
2795 gen_realpart (submode, y)));
2796 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2797 (gen_rtx_MEM (submode, XEXP (x, 0)),
2798 gen_imagpart (submode, y)));
2799 #endif
2801 else
2803 rtx realpart_x, realpart_y;
2804 rtx imagpart_x, imagpart_y;
2806 /* If this is a complex value with each part being smaller than a
2807 word, the usual calling sequence will likely pack the pieces into
2808 a single register. Unfortunately, SUBREG of hard registers only
2809 deals in terms of words, so we have a problem converting input
2810 arguments to the CONCAT of two registers that is used elsewhere
2811 for complex values. If this is before reload, we can copy it into
2812 memory and reload. FIXME, we should see about using extract and
2813 insert on integer registers, but complex short and complex char
2814 variables should be rarely used. */
2815 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2816 && (reload_in_progress | reload_completed) == 0)
2818 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2819 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2821 if (packed_dest_p || packed_src_p)
2823 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2824 ? MODE_FLOAT : MODE_INT);
2826 enum machine_mode reg_mode
2827 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2829 if (reg_mode != BLKmode)
2831 rtx mem = assign_stack_temp (reg_mode,
2832 GET_MODE_SIZE (mode), 0);
2833 rtx cmem = change_address (mem, mode, NULL_RTX);
2835 cfun->cannot_inline
2836 = N_("function using short complex types cannot be inline");
2838 if (packed_dest_p)
2840 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2841 emit_move_insn_1 (cmem, y);
2842 return emit_move_insn_1 (sreg, mem);
2844 else
2846 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2847 emit_move_insn_1 (mem, sreg);
2848 return emit_move_insn_1 (x, cmem);
2854 realpart_x = gen_realpart (submode, x);
2855 realpart_y = gen_realpart (submode, y);
2856 imagpart_x = gen_imagpart (submode, x);
2857 imagpart_y = gen_imagpart (submode, y);
2859 /* Show the output dies here. This is necessary for SUBREGs
2860 of pseudos since we cannot track their lifetimes correctly;
2861 hard regs shouldn't appear here except as return values.
2862 We never want to emit such a clobber after reload. */
2863 if (x != y
2864 && ! (reload_in_progress || reload_completed)
2865 && (GET_CODE (realpart_x) == SUBREG
2866 || GET_CODE (imagpart_x) == SUBREG))
2868 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (realpart_x, realpart_y));
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2874 (imagpart_x, imagpart_y));
2877 return get_last_insn ();
2880 /* This will handle any multi-word mode that lacks a move_insn pattern.
2881 However, you will get better code if you define such patterns,
2882 even if they must turn into multiple assembler instructions. */
2883 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2885 rtx last_insn = 0;
2886 rtx seq, inner;
2887 int need_clobber;
2889 #ifdef PUSH_ROUNDING
2891 /* If X is a push on the stack, do the push now and replace
2892 X with a reference to the stack pointer. */
2893 if (push_operand (x, GET_MODE (x)))
2895 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2896 x = change_address (x, VOIDmode, stack_pointer_rtx);
2898 #endif
2900 /* If we are in reload, see if either operand is a MEM whose address
2901 is scheduled for replacement. */
2902 if (reload_in_progress && GET_CODE (x) == MEM
2903 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2905 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2907 MEM_COPY_ATTRIBUTES (new, x);
2908 x = new;
2910 if (reload_in_progress && GET_CODE (y) == MEM
2911 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2913 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2915 MEM_COPY_ATTRIBUTES (new, y);
2916 y = new;
2919 start_sequence ();
2921 need_clobber = 0;
2922 for (i = 0;
2923 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2924 i++)
2926 rtx xpart = operand_subword (x, i, 1, mode);
2927 rtx ypart = operand_subword (y, i, 1, mode);
2929 /* If we can't get a part of Y, put Y into memory if it is a
2930 constant. Otherwise, force it into a register. If we still
2931 can't get a part of Y, abort. */
2932 if (ypart == 0 && CONSTANT_P (y))
2934 y = force_const_mem (mode, y);
2935 ypart = operand_subword (y, i, 1, mode);
2937 else if (ypart == 0)
2938 ypart = operand_subword_force (y, i, mode);
2940 if (xpart == 0 || ypart == 0)
2941 abort ();
2943 need_clobber |= (GET_CODE (xpart) == SUBREG);
2945 last_insn = emit_move_insn (xpart, ypart);
2948 seq = gen_sequence ();
2949 end_sequence ();
2951 /* Show the output dies here. This is necessary for SUBREGs
2952 of pseudos since we cannot track their lifetimes correctly;
2953 hard regs shouldn't appear here except as return values.
2954 We never want to emit such a clobber after reload. */
2955 if (x != y
2956 && ! (reload_in_progress || reload_completed)
2957 && need_clobber != 0)
2959 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2962 emit_insn (seq);
2964 return last_insn;
2966 else
2967 abort ();
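/* Worked example (added for clarity, not in the original source): moving a
   DImode value on a 32-bit target that lacks a movdi pattern falls into
   the multi-word branch above.  The loop runs twice, using
   operand_subword to split both operands, roughly:

     emit_move_insn (operand_subword (x, 0, 1, DImode),
                     operand_subword (y, 0, 1, DImode));
     emit_move_insn (operand_subword (x, 1, 1, DImode),
                     operand_subword (y, 1, 1, DImode));

   and the whole sequence is preceded by a CLOBBER of X when X involves a
   SUBREG of a pseudo, for the lifetime-tracking reason given in the
   comment above.  */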
2970 /* Pushing data onto the stack. */
2972 /* Push a block of length SIZE (perhaps variable)
2973 and return an rtx to address the beginning of the block.
2974 Note that it is not possible for the value returned to be a QUEUED.
2975 The value may be virtual_outgoing_args_rtx.
2977 EXTRA is the number of bytes of padding to push in addition to SIZE.
2978 BELOW nonzero means this padding comes at low addresses;
2979 otherwise, the padding comes at high addresses. */
2982 push_block (size, extra, below)
2983 rtx size;
2984 int extra, below;
2986 register rtx temp;
2988 size = convert_modes (Pmode, ptr_mode, size, 1);
2989 if (CONSTANT_P (size))
2990 anti_adjust_stack (plus_constant (size, extra));
2991 else if (GET_CODE (size) == REG && extra == 0)
2992 anti_adjust_stack (size);
2993 else
2995 temp = copy_to_mode_reg (Pmode, size);
2996 if (extra != 0)
2997 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2998 temp, 0, OPTAB_LIB_WIDEN);
2999 anti_adjust_stack (temp);
3002 #ifndef STACK_GROWS_DOWNWARD
3003 #ifdef ARGS_GROW_DOWNWARD
3004 if (!ACCUMULATE_OUTGOING_ARGS)
3005 #else
3006 if (0)
3007 #endif
3008 #else
3009 if (1)
3010 #endif
3012 /* Return the lowest stack address when STACK or ARGS grow downward and
3013 we are not accumulating outgoing arguments (the c4x port uses such
3014 conventions). */
3015 temp = virtual_outgoing_args_rtx;
3016 if (extra != 0 && below)
3017 temp = plus_constant (temp, extra);
3019 else
3021 if (GET_CODE (size) == CONST_INT)
3022 temp = plus_constant (virtual_outgoing_args_rtx,
3023 -INTVAL (size) - (below ? 0 : extra));
3024 else if (extra != 0 && !below)
3025 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3026 negate_rtx (Pmode, plus_constant (size, extra)));
3027 else
3028 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3029 negate_rtx (Pmode, size));
3032 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
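/* Illustrative sketch (not part of the original source): emit_push_insn
   below uses push_block when a BLKmode argument cannot be pushed with
   push insns.  Pushing a 24-byte argument with no extra padding looks
   roughly like this (the byte count is an arbitrary example):

     rtx addr = push_block (GEN_INT (24), 0, 0);
     rtx target = gen_rtx_MEM (BLKmode, addr);

   after which the argument data is copied into TARGET.  */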
3036 gen_push_operand ()
3038 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3041 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3042 block of SIZE bytes. */
3044 static rtx
3045 get_push_address (size)
3046 int size;
3048 register rtx temp;
3050 if (STACK_PUSH_CODE == POST_DEC)
3051 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3052 else if (STACK_PUSH_CODE == POST_INC)
3053 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3054 else
3055 temp = stack_pointer_rtx;
3057 return copy_to_reg (temp);
3060 /* Generate code to push X onto the stack, assuming it has mode MODE and
3061 type TYPE.
3062 MODE is redundant except when X is a CONST_INT (since they don't
3063 carry mode info).
3064 SIZE is an rtx for the size of data to be copied (in bytes),
3065 needed only if X is BLKmode.
3067 ALIGN is maximum alignment we can assume.
3069 If PARTIAL and REG are both nonzero, then copy that many of the first
3070 words of X into registers starting with REG, and push the rest of X.
3071 The amount of space pushed is decreased by PARTIAL words,
3072 rounded *down* to a multiple of PARM_BOUNDARY.
3073 REG must be a hard register in this case.
3074 If REG is zero but PARTIAL is not, take all other actions for an
3075 argument partially in registers, but do not actually load any
3076 registers.
3078 EXTRA is the amount in bytes of extra space to leave next to this arg.
3079 This is ignored if an argument block has already been allocated.
3081 On a machine that lacks real push insns, ARGS_ADDR is the address of
3082 the bottom of the argument block for this call. We use indexing off there
3083 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3084 argument block has not been preallocated.
3086 ARGS_SO_FAR is the size of args previously pushed for this call.
3088 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3089 for arguments passed in registers. If nonzero, it will be the number
3090 of bytes required. */
3092 void
3093 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3094 args_addr, args_so_far, reg_parm_stack_space,
3095 alignment_pad)
3096 register rtx x;
3097 enum machine_mode mode;
3098 tree type;
3099 rtx size;
3100 unsigned int align;
3101 int partial;
3102 rtx reg;
3103 int extra;
3104 rtx args_addr;
3105 rtx args_so_far;
3106 int reg_parm_stack_space;
3107 rtx alignment_pad;
3109 rtx xinner;
3110 enum direction stack_direction
3111 #ifdef STACK_GROWS_DOWNWARD
3112 = downward;
3113 #else
3114 = upward;
3115 #endif
3117 /* Decide where to pad the argument: `downward' for below,
3118 `upward' for above, or `none' for don't pad it.
3119 Default is below for small data on big-endian machines; else above. */
3120 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3122 /* Invert direction if stack is post-update. */
3123 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3124 if (where_pad != none)
3125 where_pad = (where_pad == downward ? upward : downward);
3127 xinner = x = protect_from_queue (x, 0);
3129 if (mode == BLKmode)
3131 /* Copy a block into the stack, entirely or partially. */
3133 register rtx temp;
3134 int used = partial * UNITS_PER_WORD;
3135 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3136 int skip;
3138 if (size == 0)
3139 abort ();
3141 used -= offset;
3143 /* USED is now the # of bytes we need not copy to the stack
3144 because registers will take care of them. */
3146 if (partial != 0)
3147 xinner = change_address (xinner, BLKmode,
3148 plus_constant (XEXP (xinner, 0), used));
3150 /* If the partial register-part of the arg counts in its stack size,
3151 skip the part of stack space corresponding to the registers.
3152 Otherwise, start copying to the beginning of the stack space,
3153 by setting SKIP to 0. */
3154 skip = (reg_parm_stack_space == 0) ? 0 : used;
3156 #ifdef PUSH_ROUNDING
3157 /* Do it with several push insns if that doesn't take lots of insns
3158 and if there is no difficulty with push insns that skip bytes
3159 on the stack for alignment purposes. */
3160 if (args_addr == 0
3161 && PUSH_ARGS
3162 && GET_CODE (size) == CONST_INT
3163 && skip == 0
3164 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3165 /* Here we avoid the case of a structure whose weak alignment
3166 forces many pushes of a small amount of data,
3167 and such small pushes do rounding that causes trouble. */
3168 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3169 || align >= BIGGEST_ALIGNMENT
3170 || PUSH_ROUNDING (align) == align)
3171 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3173 /* Push padding now if padding above and stack grows down,
3174 or if padding below and stack grows up.
3175 But if space already allocated, this has already been done. */
3176 if (extra && args_addr == 0
3177 && where_pad != none && where_pad != stack_direction)
3178 anti_adjust_stack (GEN_INT (extra));
3180 stack_pointer_delta += INTVAL (size) - used;
3181 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3182 INTVAL (size) - used, align);
3184 if (current_function_check_memory_usage && ! in_check_memory_usage)
3186 rtx temp;
3188 in_check_memory_usage = 1;
3189 temp = get_push_address (INTVAL (size) - used);
3190 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3191 emit_library_call (chkr_copy_bitmap_libfunc,
3192 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3193 Pmode, XEXP (xinner, 0), Pmode,
3194 GEN_INT (INTVAL (size) - used),
3195 TYPE_MODE (sizetype));
3196 else
3197 emit_library_call (chkr_set_right_libfunc,
3198 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3199 Pmode, GEN_INT (INTVAL (size) - used),
3200 TYPE_MODE (sizetype),
3201 GEN_INT (MEMORY_USE_RW),
3202 TYPE_MODE (integer_type_node));
3203 in_check_memory_usage = 0;
3206 else
3207 #endif /* PUSH_ROUNDING */
3209 rtx target;
3211 /* Otherwise make space on the stack and copy the data
3212 to the address of that space. */
3214 /* Deduct words put into registers from the size we must copy. */
3215 if (partial != 0)
3217 if (GET_CODE (size) == CONST_INT)
3218 size = GEN_INT (INTVAL (size) - used);
3219 else
3220 size = expand_binop (GET_MODE (size), sub_optab, size,
3221 GEN_INT (used), NULL_RTX, 0,
3222 OPTAB_LIB_WIDEN);
3225 /* Get the address of the stack space.
3226 In this case, we do not deal with EXTRA separately.
3227 A single stack adjust will do. */
3228 if (! args_addr)
3230 temp = push_block (size, extra, where_pad == downward);
3231 extra = 0;
3233 else if (GET_CODE (args_so_far) == CONST_INT)
3234 temp = memory_address (BLKmode,
3235 plus_constant (args_addr,
3236 skip + INTVAL (args_so_far)));
3237 else
3238 temp = memory_address (BLKmode,
3239 plus_constant (gen_rtx_PLUS (Pmode,
3240 args_addr,
3241 args_so_far),
3242 skip));
3243 if (current_function_check_memory_usage && ! in_check_memory_usage)
3245 in_check_memory_usage = 1;
3246 target = copy_to_reg (temp);
3247 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3248 emit_library_call (chkr_copy_bitmap_libfunc,
3249 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3250 target, Pmode,
3251 XEXP (xinner, 0), Pmode,
3252 size, TYPE_MODE (sizetype));
3253 else
3254 emit_library_call (chkr_set_right_libfunc,
3255 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3256 target, Pmode,
3257 size, TYPE_MODE (sizetype),
3258 GEN_INT (MEMORY_USE_RW),
3259 TYPE_MODE (integer_type_node));
3260 in_check_memory_usage = 0;
3263 target = gen_rtx_MEM (BLKmode, temp);
3265 if (type != 0)
3267 set_mem_attributes (target, type, 1);
3268 /* Function incoming arguments may overlap with sibling call
3269 outgoing arguments and we cannot allow reordering of reads
3270 from function arguments with stores to outgoing arguments
3271 of sibling calls. */
3272 MEM_ALIAS_SET (target) = 0;
3275 /* TEMP is the address of the block. Copy the data there. */
3276 if (GET_CODE (size) == CONST_INT
3277 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3279 move_by_pieces (target, xinner, INTVAL (size), align);
3280 goto ret;
3282 else
3284 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3285 enum machine_mode mode;
3287 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3288 mode != VOIDmode;
3289 mode = GET_MODE_WIDER_MODE (mode))
3291 enum insn_code code = movstr_optab[(int) mode];
3292 insn_operand_predicate_fn pred;
3294 if (code != CODE_FOR_nothing
3295 && ((GET_CODE (size) == CONST_INT
3296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3297 <= (GET_MODE_MASK (mode) >> 1)))
3298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3299 && (!(pred = insn_data[(int) code].operand[0].predicate)
3300 || ((*pred) (target, BLKmode)))
3301 && (!(pred = insn_data[(int) code].operand[1].predicate)
3302 || ((*pred) (xinner, BLKmode)))
3303 && (!(pred = insn_data[(int) code].operand[3].predicate)
3304 || ((*pred) (opalign, VOIDmode))))
3306 rtx op2 = convert_to_mode (mode, size, 1);
3307 rtx last = get_last_insn ();
3308 rtx pat;
3310 pred = insn_data[(int) code].operand[2].predicate;
3311 if (pred != 0 && ! (*pred) (op2, mode))
3312 op2 = copy_to_mode_reg (mode, op2);
3314 pat = GEN_FCN ((int) code) (target, xinner,
3315 op2, opalign);
3316 if (pat)
3318 emit_insn (pat);
3319 goto ret;
3321 else
3322 delete_insns_since (last);
3327 if (!ACCUMULATE_OUTGOING_ARGS)
3329 /* If the source is referenced relative to the stack pointer,
3330 copy it to another register to stabilize it. We do not need
3331 to do this if we know that we won't be changing sp. */
3333 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3334 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3335 temp = copy_to_reg (temp);
3338 /* Make inhibit_defer_pop nonzero around the library call
3339 to force it to pop the bcopy-arguments right away. */
3340 NO_DEFER_POP;
3341 #ifdef TARGET_MEM_FUNCTIONS
3342 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3343 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3344 convert_to_mode (TYPE_MODE (sizetype),
3345 size, TREE_UNSIGNED (sizetype)),
3346 TYPE_MODE (sizetype));
3347 #else
3348 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3349 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3350 convert_to_mode (TYPE_MODE (integer_type_node),
3351 size,
3352 TREE_UNSIGNED (integer_type_node)),
3353 TYPE_MODE (integer_type_node));
3354 #endif
3355 OK_DEFER_POP;
3358 else if (partial > 0)
3360 /* Scalar partly in registers. */
3362 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3363 int i;
3364 int not_stack;
3365 /* # words of start of argument
3366 that we must make space for but need not store. */
3367 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3368 int args_offset = INTVAL (args_so_far);
3369 int skip;
3371 /* Push padding now if padding above and stack grows down,
3372 or if padding below and stack grows up.
3373 But if space already allocated, this has already been done. */
3374 if (extra && args_addr == 0
3375 && where_pad != none && where_pad != stack_direction)
3376 anti_adjust_stack (GEN_INT (extra));
3378 /* If we make space by pushing it, we might as well push
3379 the real data. Otherwise, we can leave OFFSET nonzero
3380 and leave the space uninitialized. */
3381 if (args_addr == 0)
3382 offset = 0;
3384 /* Now NOT_STACK gets the number of words that we don't need to
3385 allocate on the stack. */
3386 not_stack = partial - offset;
3388 /* If the partial register-part of the arg counts in its stack size,
3389 skip the part of stack space corresponding to the registers.
3390 Otherwise, start copying to the beginning of the stack space,
3391 by setting SKIP to 0. */
3392 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3394 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3395 x = validize_mem (force_const_mem (mode, x));
3397 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3398 SUBREGs of such registers are not allowed. */
3399 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3400 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3401 x = copy_to_reg (x);
3403 /* Loop over all the words allocated on the stack for this arg. */
3404 /* We can do it by words, because any scalar bigger than a word
3405 has a size a multiple of a word. */
3406 #ifndef PUSH_ARGS_REVERSED
3407 for (i = not_stack; i < size; i++)
3408 #else
3409 for (i = size - 1; i >= not_stack; i--)
3410 #endif
3411 if (i >= not_stack + offset)
3412 emit_push_insn (operand_subword_force (x, i, mode),
3413 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3414 0, args_addr,
3415 GEN_INT (args_offset + ((i - not_stack + skip)
3416 * UNITS_PER_WORD)),
3417 reg_parm_stack_space, alignment_pad);
3419 else
3421 rtx addr;
3422 rtx target = NULL_RTX;
3423 rtx dest;
3425 /* Push padding now if padding above and stack grows down,
3426 or if padding below and stack grows up.
3427 But if space already allocated, this has already been done. */
3428 if (extra && args_addr == 0
3429 && where_pad != none && where_pad != stack_direction)
3430 anti_adjust_stack (GEN_INT (extra));
3432 #ifdef PUSH_ROUNDING
3433 if (args_addr == 0 && PUSH_ARGS)
3435 addr = gen_push_operand ();
3436 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3438 else
3439 #endif
3441 if (GET_CODE (args_so_far) == CONST_INT)
3442 addr
3443 = memory_address (mode,
3444 plus_constant (args_addr,
3445 INTVAL (args_so_far)));
3446 else
3447 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3448 args_so_far));
3449 target = addr;
3452 dest = gen_rtx_MEM (mode, addr);
3453 if (type != 0)
3455 set_mem_attributes (dest, type, 1);
3456 /* Function incoming arguments may overlap with sibling call
3457 outgoing arguments and we cannot allow reordering of reads
3458 from function arguments with stores to outgoing arguments
3459 of sibling calls. */
3460 MEM_ALIAS_SET (dest) = 0;
3463 emit_move_insn (dest, x);
3465 if (current_function_check_memory_usage && ! in_check_memory_usage)
3467 in_check_memory_usage = 1;
3468 if (target == 0)
3469 target = get_push_address (GET_MODE_SIZE (mode));
3471 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3472 emit_library_call (chkr_copy_bitmap_libfunc,
3473 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3474 Pmode, XEXP (x, 0), Pmode,
3475 GEN_INT (GET_MODE_SIZE (mode)),
3476 TYPE_MODE (sizetype));
3477 else
3478 emit_library_call (chkr_set_right_libfunc,
3479 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3480 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3481 TYPE_MODE (sizetype),
3482 GEN_INT (MEMORY_USE_RW),
3483 TYPE_MODE (integer_type_node));
3484 in_check_memory_usage = 0;
3488 ret:
3489 /* If part should go in registers, copy that part
3490 into the appropriate registers. Do this now, at the end,
3491 since mem-to-mem copies above may do function calls. */
3492 if (partial > 0 && reg != 0)
3494 /* Handle calls that pass values in multiple non-contiguous locations.
3495 The Irix 6 ABI has examples of this. */
3496 if (GET_CODE (reg) == PARALLEL)
3497 emit_group_load (reg, x, -1, align); /* ??? size? */
3498 else
3499 move_block_to_reg (REGNO (reg), x, partial, mode);
3502 if (extra && args_addr == 0 && where_pad == stack_direction)
3503 anti_adjust_stack (GEN_INT (extra));
3505 if (alignment_pad && args_addr == 0)
3506 anti_adjust_stack (alignment_pad);
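/* Illustrative sketch (not part of the original source): a caller such as
   the argument-pushing code in calls.c might push a plain SImode scalar,
   entirely on the stack, along these lines.  The argument values shown
   (no partial registers, no extra padding, push insns available so
   ARGS_ADDR is 0, and ARG_RTX a hypothetical rtx for the value) are
   assumptions chosen to exercise the simplest path above:

     emit_push_insn (arg_rtx, SImode, integer_type_node, NULL_RTX,
                     PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                     GEN_INT (0), 0, NULL_RTX);

   This takes the final `else' branch (not BLKmode, not partial) and emits
   a single move into the newly pushed stack slot.  */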
3509 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3510 operations. */
3512 static rtx
3513 get_subtarget (x)
3514 rtx x;
3516 return ((x == 0
3517 /* Only registers can be subtargets. */
3518 || GET_CODE (x) != REG
3519 /* If the register is readonly, it can't be set more than once. */
3520 || RTX_UNCHANGING_P (x)
3521 /* Don't use hard regs to avoid extending their life. */
3522 || REGNO (x) < FIRST_PSEUDO_REGISTER
3523 /* Avoid subtargets inside loops,
3524 since they hide some invariant expressions. */
3525 || preserve_subexpressions_p ())
3526 ? 0 : x);
3529 /* Expand an assignment that stores the value of FROM into TO.
3530 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3531 (This may contain a QUEUED rtx;
3532 if the value is constant, this rtx is a constant.)
3533 Otherwise, the returned value is NULL_RTX.
3535 SUGGEST_REG is no longer actually used.
3536 It used to mean, copy the value through a register
3537 and return that register, if that is possible.
3538 We now use WANT_VALUE to decide whether to do this. */
3541 expand_assignment (to, from, want_value, suggest_reg)
3542 tree to, from;
3543 int want_value;
3544 int suggest_reg ATTRIBUTE_UNUSED;
3546 register rtx to_rtx = 0;
3547 rtx result;
3549 /* Don't crash if the lhs of the assignment was erroneous. */
3551 if (TREE_CODE (to) == ERROR_MARK)
3553 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3554 return want_value ? result : NULL_RTX;
3557 /* Assignment of a structure component needs special treatment
3558 if the structure component's rtx is not simply a MEM.
3559 Assignment of an array element at a constant index, and assignment of
3560 an array element in an unaligned packed structure field, have the same
3561 problem. */
3563 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3564 || TREE_CODE (to) == ARRAY_REF)
3566 enum machine_mode mode1;
3567 HOST_WIDE_INT bitsize, bitpos;
3568 tree offset;
3569 int unsignedp;
3570 int volatilep = 0;
3571 tree tem;
3572 unsigned int alignment;
3574 push_temp_slots ();
3575 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3576 &unsignedp, &volatilep, &alignment);
3578 /* If we are going to use store_bit_field and extract_bit_field,
3579 make sure to_rtx will be safe for multiple use. */
3581 if (mode1 == VOIDmode && want_value)
3582 tem = stabilize_reference (tem);
3584 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3585 if (offset != 0)
3587 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3589 if (GET_CODE (to_rtx) != MEM)
3590 abort ();
3592 if (GET_MODE (offset_rtx) != ptr_mode)
3594 #ifdef POINTERS_EXTEND_UNSIGNED
3595 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3596 #else
3597 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3598 #endif
3601 /* A constant address in TO_RTX can have VOIDmode; we must not try
3602 to call force_reg in that case, so avoid it. */
3603 if (GET_CODE (to_rtx) == MEM
3604 && GET_MODE (to_rtx) == BLKmode
3605 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3606 && bitsize
3607 && (bitpos % bitsize) == 0
3608 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3609 && alignment == GET_MODE_ALIGNMENT (mode1))
3611 rtx temp = change_address (to_rtx, mode1,
3612 plus_constant (XEXP (to_rtx, 0),
3613 (bitpos /
3614 BITS_PER_UNIT)));
3615 if (GET_CODE (XEXP (temp, 0)) == REG)
3616 to_rtx = temp;
3617 else
3618 to_rtx = change_address (to_rtx, mode1,
3619 force_reg (GET_MODE (XEXP (temp, 0)),
3620 XEXP (temp, 0)));
3621 bitpos = 0;
3624 to_rtx = change_address (to_rtx, VOIDmode,
3625 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3626 force_reg (ptr_mode,
3627 offset_rtx)));
3630 if (volatilep)
3632 if (GET_CODE (to_rtx) == MEM)
3634 /* When the offset is zero, to_rtx is the address of the
3635 structure we are storing into, and hence may be shared.
3636 We must make a new MEM before setting the volatile bit. */
3637 if (offset == 0)
3638 to_rtx = copy_rtx (to_rtx);
3640 MEM_VOLATILE_P (to_rtx) = 1;
3642 #if 0 /* This was turned off because, when a field is volatile
3643 in an object which is not volatile, the object may be in a register,
3644 and then we would abort over here. */
3645 else
3646 abort ();
3647 #endif
3650 if (TREE_CODE (to) == COMPONENT_REF
3651 && TREE_READONLY (TREE_OPERAND (to, 1)))
3653 if (offset == 0)
3654 to_rtx = copy_rtx (to_rtx);
3656 RTX_UNCHANGING_P (to_rtx) = 1;
3659 /* Check the access. */
3660 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3662 rtx to_addr;
3663 int size;
3664 int best_mode_size;
3665 enum machine_mode best_mode;
3667 best_mode = get_best_mode (bitsize, bitpos,
3668 TYPE_ALIGN (TREE_TYPE (tem)),
3669 mode1, volatilep);
3670 if (best_mode == VOIDmode)
3671 best_mode = QImode;
3673 best_mode_size = GET_MODE_BITSIZE (best_mode);
3674 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3675 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3676 size *= GET_MODE_SIZE (best_mode);
3678 /* Check the access right of the pointer. */
3679 in_check_memory_usage = 1;
3680 if (size)
3681 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3682 VOIDmode, 3, to_addr, Pmode,
3683 GEN_INT (size), TYPE_MODE (sizetype),
3684 GEN_INT (MEMORY_USE_WO),
3685 TYPE_MODE (integer_type_node));
3686 in_check_memory_usage = 0;
3689 /* If this is a varying-length object, we must get the address of
3690 the source and do an explicit block move. */
3691 if (bitsize < 0)
3693 unsigned int from_align;
3694 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3695 rtx inner_to_rtx
3696 = change_address (to_rtx, VOIDmode,
3697 plus_constant (XEXP (to_rtx, 0),
3698 bitpos / BITS_PER_UNIT));
3700 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3701 MIN (alignment, from_align));
3702 free_temp_slots ();
3703 pop_temp_slots ();
3704 return to_rtx;
3706 else
3708 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3709 (want_value
3710 /* Spurious cast for HPUX compiler. */
3711 ? ((enum machine_mode)
3712 TYPE_MODE (TREE_TYPE (to)))
3713 : VOIDmode),
3714 unsignedp,
3715 alignment,
3716 int_size_in_bytes (TREE_TYPE (tem)),
3717 get_alias_set (to));
3719 preserve_temp_slots (result);
3720 free_temp_slots ();
3721 pop_temp_slots ();
3723 /* If the value is meaningful, convert RESULT to the proper mode.
3724 Otherwise, return nothing. */
3725 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3726 TYPE_MODE (TREE_TYPE (from)),
3727 result,
3728 TREE_UNSIGNED (TREE_TYPE (to)))
3729 : NULL_RTX);
3733 /* If the rhs is a function call and its value is not an aggregate,
3734 call the function before we start to compute the lhs.
3735 This is needed for correct code for cases such as
3736 val = setjmp (buf) on machines where reference to val
3737 requires loading up part of an address in a separate insn.
3739 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3740 since it might be a promoted variable where the zero- or sign- extension
3741 needs to be done. Handling this in the normal way is safe because no
3742 computation is done before the call. */
3743 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3744 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3745 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3746 && GET_CODE (DECL_RTL (to)) == REG))
3748 rtx value;
3750 push_temp_slots ();
3751 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3752 if (to_rtx == 0)
3753 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3755 /* Handle calls that return values in multiple non-contiguous locations.
3756 The Irix 6 ABI has examples of this. */
3757 if (GET_CODE (to_rtx) == PARALLEL)
3758 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3759 TYPE_ALIGN (TREE_TYPE (from)));
3760 else if (GET_MODE (to_rtx) == BLKmode)
3761 emit_block_move (to_rtx, value, expr_size (from),
3762 TYPE_ALIGN (TREE_TYPE (from)));
3763 else
3765 #ifdef POINTERS_EXTEND_UNSIGNED
3766 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3767 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3768 value = convert_memory_address (GET_MODE (to_rtx), value);
3769 #endif
3770 emit_move_insn (to_rtx, value);
3772 preserve_temp_slots (to_rtx);
3773 free_temp_slots ();
3774 pop_temp_slots ();
3775 return want_value ? to_rtx : NULL_RTX;
3778 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3779 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3781 if (to_rtx == 0)
3783 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3784 if (GET_CODE (to_rtx) == MEM)
3785 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3788 /* Don't move directly into a return register. */
3789 if (TREE_CODE (to) == RESULT_DECL
3790 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3792 rtx temp;
3794 push_temp_slots ();
3795 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3797 if (GET_CODE (to_rtx) == PARALLEL)
3798 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3799 TYPE_ALIGN (TREE_TYPE (from)));
3800 else
3801 emit_move_insn (to_rtx, temp);
3803 preserve_temp_slots (to_rtx);
3804 free_temp_slots ();
3805 pop_temp_slots ();
3806 return want_value ? to_rtx : NULL_RTX;
3809 /* In case we are returning the contents of an object which overlaps
3810 the place the value is being stored, use a safe function when copying
3811 a value through a pointer into a structure value return block. */
3812 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3813 && current_function_returns_struct
3814 && !current_function_returns_pcc_struct)
3816 rtx from_rtx, size;
3818 push_temp_slots ();
3819 size = expr_size (from);
3820 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3821 EXPAND_MEMORY_USE_DONT);
3823 /* Copy the rights of the bitmap. */
3824 if (current_function_check_memory_usage)
3825 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3826 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3827 XEXP (from_rtx, 0), Pmode,
3828 convert_to_mode (TYPE_MODE (sizetype),
3829 size, TREE_UNSIGNED (sizetype)),
3830 TYPE_MODE (sizetype));
3832 #ifdef TARGET_MEM_FUNCTIONS
3833 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3834 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3835 XEXP (from_rtx, 0), Pmode,
3836 convert_to_mode (TYPE_MODE (sizetype),
3837 size, TREE_UNSIGNED (sizetype)),
3838 TYPE_MODE (sizetype));
3839 #else
3840 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3841 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3842 XEXP (to_rtx, 0), Pmode,
3843 convert_to_mode (TYPE_MODE (integer_type_node),
3844 size, TREE_UNSIGNED (integer_type_node)),
3845 TYPE_MODE (integer_type_node));
3846 #endif
3848 preserve_temp_slots (to_rtx);
3849 free_temp_slots ();
3850 pop_temp_slots ();
3851 return want_value ? to_rtx : NULL_RTX;
3854 /* Compute FROM and store the value in the rtx we got. */
3856 push_temp_slots ();
3857 result = store_expr (from, to_rtx, want_value);
3858 preserve_temp_slots (result);
3859 free_temp_slots ();
3860 pop_temp_slots ();
3861 return want_value ? result : NULL_RTX;
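/* Illustrative sketch (editorial addition, hypothetical names; not from the
   original file): the WANT_VALUE convention of expand_assignment above.  A
   front end expanding `a = b' purely as a statement passes zero and ignores
   the (NULL_RTX) result; for an assignment used as a value, as in
   `c = (a = b)', it passes nonzero and must use the returned rtx.  The trees
   A and B are assumed to have been built elsewhere.  */
#if 0
static rtx
example_expand_assignment (a, b, used_as_value)
     tree a, b;
     int used_as_value;
{
  return expand_assignment (a, b, used_as_value, /* suggest_reg */ 0);
}
#endif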
3864 /* Generate code for computing expression EXP,
3865 and storing the value into TARGET.
3866 TARGET may contain a QUEUED rtx.
3868 If WANT_VALUE is nonzero, return a copy of the value
3869 not in TARGET, so that we can be sure to use the proper
3870 value in a containing expression even if TARGET has something
3871 else stored in it. If possible, we copy the value through a pseudo
3872 and return that pseudo. Or, if the value is constant, we try to
3873 return the constant. In some cases, we return a pseudo
3874 copied *from* TARGET.
3876 If the mode is BLKmode then we may return TARGET itself.
3877 It turns out that in BLKmode it doesn't cause a problem,
3878 because C has no operators that could combine two different
3879 assignments into the same BLKmode object with different values
3880 with no sequence point. Will other languages need this to
3881 be more thorough?
3883 If WANT_VALUE is 0, we return NULL, to make sure
3884 to catch quickly any cases where the caller uses the value
3885 and fails to set WANT_VALUE. */
3887 rtx
3888 store_expr (exp, target, want_value)
3889 register tree exp;
3890 register rtx target;
3891 int want_value;
3893 register rtx temp;
3894 int dont_return_target = 0;
3896 if (TREE_CODE (exp) == COMPOUND_EXPR)
3898 /* Perform first part of compound expression, then assign from second
3899 part. */
3900 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3901 emit_queue ();
3902 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3904 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3906 /* For conditional expression, get safe form of the target. Then
3907 test the condition, doing the appropriate assignment on either
3908 side. This avoids the creation of unnecessary temporaries.
3909 For non-BLKmode, it is more efficient not to do this. */
3911 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3913 emit_queue ();
3914 target = protect_from_queue (target, 1);
3916 do_pending_stack_adjust ();
3917 NO_DEFER_POP;
3918 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3919 start_cleanup_deferral ();
3920 store_expr (TREE_OPERAND (exp, 1), target, 0);
3921 end_cleanup_deferral ();
3922 emit_queue ();
3923 emit_jump_insn (gen_jump (lab2));
3924 emit_barrier ();
3925 emit_label (lab1);
3926 start_cleanup_deferral ();
3927 store_expr (TREE_OPERAND (exp, 2), target, 0);
3928 end_cleanup_deferral ();
3929 emit_queue ();
3930 emit_label (lab2);
3931 OK_DEFER_POP;
3933 return want_value ? target : NULL_RTX;
3935 else if (queued_subexp_p (target))
3936 /* If target contains a postincrement, let's not risk
3937 using it as the place to generate the rhs. */
3939 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3941 /* Expand EXP into a new pseudo. */
3942 temp = gen_reg_rtx (GET_MODE (target));
3943 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3945 else
3946 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3948 /* If target is volatile, ANSI requires accessing the value
3949 *from* the target, if it is accessed. So make that happen.
3950 In no case return the target itself. */
3951 if (! MEM_VOLATILE_P (target) && want_value)
3952 dont_return_target = 1;
3954 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3955 && GET_MODE (target) != BLKmode)
3956 /* If target is in memory and caller wants value in a register instead,
3957 arrange that. Pass TARGET as target for expand_expr so that,
3958 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3959 We know expand_expr will not use the target in that case.
3960 Don't do this if TARGET is volatile because we are supposed
3961 to write it and then read it. */
3963 temp = expand_expr (exp, target, GET_MODE (target), 0);
3964 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3965 temp = copy_to_reg (temp);
3966 dont_return_target = 1;
3968 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3969 /* If this is a scalar in a register that is stored in a wider mode
3970 than the declared mode, compute the result into its declared mode
3971 and then convert to the wider mode. Our value is the computed
3972 expression. */
3974 /* If we don't want a value, we can do the conversion inside EXP,
3975 which will often result in some optimizations. Do the conversion
3976 in two steps: first change the signedness, if needed, then
3977 the extend. But don't do this if the type of EXP is a subtype
3978 of something else since then the conversion might involve
3979 more than just converting modes. */
3980 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3981 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3983 if (TREE_UNSIGNED (TREE_TYPE (exp))
3984 != SUBREG_PROMOTED_UNSIGNED_P (target))
3985 exp
3986 = convert
3987 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3988 TREE_TYPE (exp)),
3989 exp);
3991 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3992 SUBREG_PROMOTED_UNSIGNED_P (target)),
3993 exp);
3996 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3998 /* If TEMP is a volatile MEM and we want a result value, make
3999 the access now so it gets done only once. Likewise if
4000 it contains TARGET. */
4001 if (GET_CODE (temp) == MEM && want_value
4002 && (MEM_VOLATILE_P (temp)
4003 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4004 temp = copy_to_reg (temp);
4006 /* If TEMP is a VOIDmode constant, use convert_modes to make
4007 sure that we properly convert it. */
4008 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4009 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4010 TYPE_MODE (TREE_TYPE (exp)), temp,
4011 SUBREG_PROMOTED_UNSIGNED_P (target));
4013 convert_move (SUBREG_REG (target), temp,
4014 SUBREG_PROMOTED_UNSIGNED_P (target));
4016 /* If we promoted a constant, change the mode back down to match
4017 target. Otherwise, the caller might get confused by a result whose
4018 mode is larger than expected. */
4020 if (want_value && GET_MODE (temp) != GET_MODE (target)
4021 && GET_MODE (temp) != VOIDmode)
4023 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4024 SUBREG_PROMOTED_VAR_P (temp) = 1;
4025 SUBREG_PROMOTED_UNSIGNED_P (temp)
4026 = SUBREG_PROMOTED_UNSIGNED_P (target);
4029 return want_value ? temp : NULL_RTX;
4031 else
4033 temp = expand_expr (exp, target, GET_MODE (target), 0);
4034 /* Return TARGET if it's a specified hardware register.
4035 If TARGET is a volatile mem ref, either return TARGET
4036 or return a reg copied *from* TARGET; ANSI requires this.
4038 Otherwise, if TEMP is not TARGET, return TEMP
4039 if it is constant (for efficiency),
4040 or if we really want the correct value. */
4041 if (!(target && GET_CODE (target) == REG
4042 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4043 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4044 && ! rtx_equal_p (temp, target)
4045 && (CONSTANT_P (temp) || want_value))
4046 dont_return_target = 1;
4049 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4050 the same as that of TARGET, adjust the constant. This is needed, for
4051 example, in case it is a CONST_DOUBLE and we want only a word-sized
4052 value. */
4053 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4054 && TREE_CODE (exp) != ERROR_MARK
4055 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4056 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4057 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4059 if (current_function_check_memory_usage
4060 && GET_CODE (target) == MEM
4061 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4063 in_check_memory_usage = 1;
4064 if (GET_CODE (temp) == MEM)
4065 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4066 VOIDmode, 3, XEXP (target, 0), Pmode,
4067 XEXP (temp, 0), Pmode,
4068 expr_size (exp), TYPE_MODE (sizetype));
4069 else
4070 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4071 VOIDmode, 3, XEXP (target, 0), Pmode,
4072 expr_size (exp), TYPE_MODE (sizetype),
4073 GEN_INT (MEMORY_USE_WO),
4074 TYPE_MODE (integer_type_node));
4075 in_check_memory_usage = 0;
4078 /* If value was not generated in the target, store it there.
4079 Convert the value to TARGET's type first if necessary. */
4080 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4081 one or both of them are volatile memory refs, we have to distinguish
4082 two cases:
4083 - expand_expr has used TARGET. In this case, we must not generate
4084 another copy. This can be detected by TEMP being equal to TARGET
4085 according to ==.
4086 - expand_expr has not used TARGET - that means that the source just
4087 happens to have the same RTX form. Since temp will have been created
4088 by expand_expr, it will compare unequal according to == .
4089 We must generate a copy in this case, to reach the correct number
4090 of volatile memory references. */
4092 if ((! rtx_equal_p (temp, target)
4093 || (temp != target && (side_effects_p (temp)
4094 || side_effects_p (target))))
4095 && TREE_CODE (exp) != ERROR_MARK)
4097 target = protect_from_queue (target, 1);
4098 if (GET_MODE (temp) != GET_MODE (target)
4099 && GET_MODE (temp) != VOIDmode)
4101 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4102 if (dont_return_target)
4104 /* In this case, we will return TEMP,
4105 so make sure it has the proper mode.
4106 But don't forget to store the value into TARGET. */
4107 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4108 emit_move_insn (target, temp);
4110 else
4111 convert_move (target, temp, unsignedp);
4114 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4116 /* Handle copying a string constant into an array.
4117 The string constant may be shorter than the array.
4118 So copy just the string's actual length, and clear the rest. */
4119 rtx size;
4120 rtx addr;
4122 /* Get the size of the data type of the string,
4123 which is actually the size of the target. */
4124 size = expr_size (exp);
4125 if (GET_CODE (size) == CONST_INT
4126 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4127 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4128 else
4130 /* Compute the size of the data to copy from the string. */
4131 tree copy_size
4132 = size_binop (MIN_EXPR,
4133 make_tree (sizetype, size),
4134 size_int (TREE_STRING_LENGTH (exp)));
4135 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4136 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4137 VOIDmode, 0);
4138 rtx label = 0;
4140 /* Copy that much. */
4141 emit_block_move (target, temp, copy_size_rtx,
4142 TYPE_ALIGN (TREE_TYPE (exp)));
4144 /* Figure out how much is left in TARGET that we have to clear.
4145 Do all calculations in ptr_mode. */
4147 addr = XEXP (target, 0);
4148 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4150 if (GET_CODE (copy_size_rtx) == CONST_INT)
4152 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4153 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4154 align = MIN (align,
4155 (unsigned int) (BITS_PER_UNIT
4156 * (INTVAL (copy_size_rtx)
4157 & - INTVAL (copy_size_rtx))));
4159 else
4161 addr = force_reg (ptr_mode, addr);
4162 addr = expand_binop (ptr_mode, add_optab, addr,
4163 copy_size_rtx, NULL_RTX, 0,
4164 OPTAB_LIB_WIDEN);
4166 size = expand_binop (ptr_mode, sub_optab, size,
4167 copy_size_rtx, NULL_RTX, 0,
4168 OPTAB_LIB_WIDEN);
4170 align = BITS_PER_UNIT;
4171 label = gen_label_rtx ();
4172 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4173 GET_MODE (size), 0, 0, label);
4175 align = MIN (align, expr_align (copy_size));
4177 if (size != const0_rtx)
4179 rtx dest = gen_rtx_MEM (BLKmode, addr);
4181 MEM_COPY_ATTRIBUTES (dest, target);
4183 /* Be sure we can write on ADDR. */
4184 in_check_memory_usage = 1;
4185 if (current_function_check_memory_usage)
4186 emit_library_call (chkr_check_addr_libfunc,
4187 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4188 addr, Pmode,
4189 size, TYPE_MODE (sizetype),
4190 GEN_INT (MEMORY_USE_WO),
4191 TYPE_MODE (integer_type_node));
4192 in_check_memory_usage = 0;
4193 clear_storage (dest, size, align);
4196 if (label)
4197 emit_label (label);
4200 /* Handle calls that return values in multiple non-contiguous locations.
4201 The Irix 6 ABI has examples of this. */
4202 else if (GET_CODE (target) == PARALLEL)
4203 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4204 TYPE_ALIGN (TREE_TYPE (exp)));
4205 else if (GET_MODE (temp) == BLKmode)
4206 emit_block_move (target, temp, expr_size (exp),
4207 TYPE_ALIGN (TREE_TYPE (exp)));
4208 else
4209 emit_move_insn (target, temp);
4212 /* If we don't want a value, return NULL_RTX. */
4213 if (! want_value)
4214 return NULL_RTX;
4216 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4217 ??? The latter test doesn't seem to make sense. */
4218 else if (dont_return_target && GET_CODE (temp) != MEM)
4219 return temp;
4221 /* Return TARGET itself if it is a hard register. */
4222 else if (want_value && GET_MODE (target) != BLKmode
4223 && ! (GET_CODE (target) == REG
4224 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4225 return copy_to_reg (target);
4227 else
4228 return target;
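/* Illustrative sketch (editorial addition, hypothetical names; not from the
   original file): the WANT_VALUE contract of store_expr.  Storing purely for
   effect passes zero and ignores the (NULL_RTX) result; a caller that needs
   the stored value in a containing expression passes nonzero and must use
   the returned rtx rather than TARGET, since the result may be a pseudo
   copied from TARGET.  */
#if 0
static rtx
example_store_expr (exp, target, used_in_larger_expression)
     tree exp;
     rtx target;
     int used_in_larger_expression;
{
  if (! used_in_larger_expression)
    {
      store_expr (exp, target, 0);
      return NULL_RTX;
    }

  return store_expr (exp, target, 1);
}
#endif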
4231 /* Return 1 if EXP just contains zeros. */
4233 static int
4234 is_zeros_p (exp)
4235 tree exp;
4237 tree elt;
4239 switch (TREE_CODE (exp))
4241 case CONVERT_EXPR:
4242 case NOP_EXPR:
4243 case NON_LVALUE_EXPR:
4244 return is_zeros_p (TREE_OPERAND (exp, 0));
4246 case INTEGER_CST:
4247 return integer_zerop (exp);
4249 case COMPLEX_CST:
4250 return
4251 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4253 case REAL_CST:
4254 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4256 case CONSTRUCTOR:
4257 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4258 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4259 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4260 if (! is_zeros_p (TREE_VALUE (elt)))
4261 return 0;
4263 return 1;
4265 default:
4266 return 0;
4270 /* Return 1 if EXP contains mostly (3/4) zeros. */
4272 static int
4273 mostly_zeros_p (exp)
4274 tree exp;
4276 if (TREE_CODE (exp) == CONSTRUCTOR)
4278 int elts = 0, zeros = 0;
4279 tree elt = CONSTRUCTOR_ELTS (exp);
4280 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4282 /* If there are no ranges of true bits, it is all zero. */
4283 return elt == NULL_TREE;
4285 for (; elt; elt = TREE_CHAIN (elt))
4287 /* We do not handle the case where the index is a RANGE_EXPR,
4288 so the statistic will be somewhat inaccurate.
4289 We do make a more accurate count in store_constructor itself,
4290 so since this function is only used for nested array elements,
4291 this should be close enough. */
4292 if (mostly_zeros_p (TREE_VALUE (elt)))
4293 zeros++;
4294 elts++;
4297 return 4 * zeros >= 3 * elts;
4300 return is_zeros_p (exp);
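/* Editorial worked example (not from the original file): the test above,
   4 * zeros >= 3 * elts, is "at least three quarters of the elements are
   (mostly) zero" computed without division.  For instance, the CONSTRUCTOR
   for the automatic initializer `int v[8] = { 0, 0, 0, 0, 0, 0, 7, 9 };'
   gives zeros == 6 and elts == 8, so 24 >= 24 holds and the predicate is
   true; store_constructor below makes the equivalent check, clears the whole
   object first, and stores only the two nonzero elements individually.  */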
4303 /* Helper function for store_constructor.
4304 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4305 TYPE is the type of the CONSTRUCTOR, not the element type.
4306 ALIGN and CLEARED are as for store_constructor.
4307 ALIAS_SET is the alias set to use for any stores.
4309 This provides a recursive shortcut back to store_constructor when it isn't
4310 necessary to go through store_field. This is so that we can pass through
4311 the cleared field to let store_constructor know that we may not have to
4312 clear a substructure if the outer structure has already been cleared. */
4314 static void
4315 store_constructor_field (target, bitsize, bitpos,
4316 mode, exp, type, align, cleared, alias_set)
4317 rtx target;
4318 unsigned HOST_WIDE_INT bitsize;
4319 HOST_WIDE_INT bitpos;
4320 enum machine_mode mode;
4321 tree exp, type;
4322 unsigned int align;
4323 int cleared;
4324 int alias_set;
4326 if (TREE_CODE (exp) == CONSTRUCTOR
4327 && bitpos % BITS_PER_UNIT == 0
4328 /* If we have a non-zero bitpos for a register target, then we just
4329 let store_field do the bitfield handling. This is unlikely to
4330 generate unnecessary clear instructions anyways. */
4331 && (bitpos == 0 || GET_CODE (target) == MEM))
4333 if (bitpos != 0)
4334 target
4335 = change_address (target,
4336 GET_MODE (target) == BLKmode
4337 || 0 != (bitpos
4338 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4339 ? BLKmode : VOIDmode,
4340 plus_constant (XEXP (target, 0),
4341 bitpos / BITS_PER_UNIT));
4344 /* Show the alignment may no longer be what it was and update the alias
4345 set, if required. */
4346 if (bitpos != 0)
4347 align = MIN (align, (unsigned int) bitpos & - bitpos);
4348 if (GET_CODE (target) == MEM)
4349 MEM_ALIAS_SET (target) = alias_set;
4351 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4353 else
4354 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4355 int_size_in_bytes (type), alias_set);
4358 /* Store the value of constructor EXP into the rtx TARGET.
4359 TARGET is either a REG or a MEM.
4360 ALIGN is the maximum known alignment for TARGET.
4361 CLEARED is true if TARGET is known to have been zero'd.
4362 SIZE is the number of bytes of TARGET we are allowed to modify: this
4363 may not be the same as the size of EXP if we are assigning to a field
4364 which has been packed to exclude padding bits. */
4366 static void
4367 store_constructor (exp, target, align, cleared, size)
4368 tree exp;
4369 rtx target;
4370 unsigned int align;
4371 int cleared;
4372 HOST_WIDE_INT size;
4374 tree type = TREE_TYPE (exp);
4375 #ifdef WORD_REGISTER_OPERATIONS
4376 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4377 #endif
4379 /* We know our target cannot conflict, since safe_from_p has been called. */
4380 #if 0
4381 /* Don't try copying piece by piece into a hard register
4382 since that is vulnerable to being clobbered by EXP.
4383 Instead, construct in a pseudo register and then copy it all. */
4384 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4386 rtx temp = gen_reg_rtx (GET_MODE (target));
4387 store_constructor (exp, temp, align, cleared, size);
4388 emit_move_insn (target, temp);
4389 return;
4391 #endif
4393 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4394 || TREE_CODE (type) == QUAL_UNION_TYPE)
4396 register tree elt;
4398 /* Inform later passes that the whole union value is dead. */
4399 if ((TREE_CODE (type) == UNION_TYPE
4400 || TREE_CODE (type) == QUAL_UNION_TYPE)
4401 && ! cleared)
4403 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4405 /* If the constructor is empty, clear the union. */
4406 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4407 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4410 /* If we are building a static constructor into a register,
4411 set the initial value as zero so we can fold the value into
4412 a constant. But if more than one register is involved,
4413 this probably loses. */
4414 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4415 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4417 if (! cleared)
4418 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4420 cleared = 1;
4423 /* If the constructor has fewer fields than the structure
4424 or if we are initializing the structure to mostly zeros,
4425 clear the whole structure first. Don't do this if TARGET is a
4426 register whose mode size isn't equal to SIZE since clear_storage
4427 can't handle this case. */
4428 else if (size > 0
4429 && ((list_length (CONSTRUCTOR_ELTS (exp))
4430 != fields_length (type))
4431 || mostly_zeros_p (exp))
4432 && (GET_CODE (target) != REG
4433 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4435 if (! cleared)
4436 clear_storage (target, GEN_INT (size), align);
4438 cleared = 1;
4440 else if (! cleared)
4441 /* Inform later passes that the old value is dead. */
4442 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4444 /* Store each element of the constructor into
4445 the corresponding field of TARGET. */
4447 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4449 register tree field = TREE_PURPOSE (elt);
4450 #ifdef WORD_REGISTER_OPERATIONS
4451 tree value = TREE_VALUE (elt);
4452 #endif
4453 register enum machine_mode mode;
4454 HOST_WIDE_INT bitsize;
4455 HOST_WIDE_INT bitpos = 0;
4456 int unsignedp;
4457 tree offset;
4458 rtx to_rtx = target;
4460 /* Just ignore missing fields.
4461 We cleared the whole structure, above,
4462 if any fields are missing. */
4463 if (field == 0)
4464 continue;
4466 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4467 continue;
4469 if (host_integerp (DECL_SIZE (field), 1))
4470 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4471 else
4472 bitsize = -1;
4474 unsignedp = TREE_UNSIGNED (field);
4475 mode = DECL_MODE (field);
4476 if (DECL_BIT_FIELD (field))
4477 mode = VOIDmode;
4479 offset = DECL_FIELD_OFFSET (field);
4480 if (host_integerp (offset, 0)
4481 && host_integerp (bit_position (field), 0))
4483 bitpos = int_bit_position (field);
4484 offset = 0;
4486 else
4487 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4489 if (offset)
4491 rtx offset_rtx;
4493 if (contains_placeholder_p (offset))
4494 offset = build (WITH_RECORD_EXPR, sizetype,
4495 offset, make_tree (TREE_TYPE (exp), target));
4497 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4498 if (GET_CODE (to_rtx) != MEM)
4499 abort ();
4501 if (GET_MODE (offset_rtx) != ptr_mode)
4503 #ifdef POINTERS_EXTEND_UNSIGNED
4504 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4505 #else
4506 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4507 #endif
4510 to_rtx
4511 = change_address (to_rtx, VOIDmode,
4512 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4513 force_reg (ptr_mode,
4514 offset_rtx)));
4515 align = DECL_OFFSET_ALIGN (field);
4518 if (TREE_READONLY (field))
4520 if (GET_CODE (to_rtx) == MEM)
4521 to_rtx = copy_rtx (to_rtx);
4523 RTX_UNCHANGING_P (to_rtx) = 1;
4526 #ifdef WORD_REGISTER_OPERATIONS
4527 /* If this initializes a field that is smaller than a word, at the
4528 start of a word, try to widen it to a full word.
4529 This special case allows us to output C++ member function
4530 initializations in a form that the optimizers can understand. */
4531 if (GET_CODE (target) == REG
4532 && bitsize < BITS_PER_WORD
4533 && bitpos % BITS_PER_WORD == 0
4534 && GET_MODE_CLASS (mode) == MODE_INT
4535 && TREE_CODE (value) == INTEGER_CST
4536 && exp_size >= 0
4537 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4539 tree type = TREE_TYPE (value);
4540 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4542 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4543 value = convert (type, value);
4545 if (BYTES_BIG_ENDIAN)
4546 value
4547 = fold (build (LSHIFT_EXPR, type, value,
4548 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4549 bitsize = BITS_PER_WORD;
4550 mode = word_mode;
4552 #endif
4553 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4554 TREE_VALUE (elt), type, align, cleared,
4555 (DECL_NONADDRESSABLE_P (field)
4556 && GET_CODE (to_rtx) == MEM)
4557 ? MEM_ALIAS_SET (to_rtx)
4558 : get_alias_set (TREE_TYPE (field)));
4561 else if (TREE_CODE (type) == ARRAY_TYPE)
4563 register tree elt;
4564 register int i;
4565 int need_to_clear;
4566 tree domain = TYPE_DOMAIN (type);
4567 tree elttype = TREE_TYPE (type);
4568 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4569 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4570 HOST_WIDE_INT minelt;
4571 HOST_WIDE_INT maxelt;
4573 /* If we have constant bounds for the range of the type, get them. */
4574 if (const_bounds_p)
4576 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4577 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4580 /* If the constructor has fewer elements than the array,
4581 clear the whole array first. Similarly if this is a
4582 static constructor of a non-BLKmode object. */
4583 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4584 need_to_clear = 1;
4585 else
4587 HOST_WIDE_INT count = 0, zero_count = 0;
4588 need_to_clear = ! const_bounds_p;
4590 /* This loop is a more accurate version of the loop in
4591 mostly_zeros_p (it handles RANGE_EXPR in an index).
4592 It is also needed to check for missing elements. */
4593 for (elt = CONSTRUCTOR_ELTS (exp);
4594 elt != NULL_TREE && ! need_to_clear;
4595 elt = TREE_CHAIN (elt))
4597 tree index = TREE_PURPOSE (elt);
4598 HOST_WIDE_INT this_node_count;
4600 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4602 tree lo_index = TREE_OPERAND (index, 0);
4603 tree hi_index = TREE_OPERAND (index, 1);
4605 if (! host_integerp (lo_index, 1)
4606 || ! host_integerp (hi_index, 1))
4608 need_to_clear = 1;
4609 break;
4612 this_node_count = (tree_low_cst (hi_index, 1)
4613 - tree_low_cst (lo_index, 1) + 1);
4615 else
4616 this_node_count = 1;
4618 count += this_node_count;
4619 if (mostly_zeros_p (TREE_VALUE (elt)))
4620 zero_count += this_node_count;
4623 /* Clear the entire array first if there are any missing elements,
4624 or if the incidence of zero elements is >= 75%. */
4625 if (! need_to_clear
4626 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4627 need_to_clear = 1;
4630 if (need_to_clear && size > 0)
4632 if (! cleared)
4633 clear_storage (target, GEN_INT (size), align);
4634 cleared = 1;
4636 else
4637 /* Inform later passes that the old value is dead. */
4638 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4640 /* Store each element of the constructor into
4641 the corresponding element of TARGET, determined
4642 by counting the elements. */
4643 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4644 elt;
4645 elt = TREE_CHAIN (elt), i++)
4647 register enum machine_mode mode;
4648 HOST_WIDE_INT bitsize;
4649 HOST_WIDE_INT bitpos;
4650 int unsignedp;
4651 tree value = TREE_VALUE (elt);
4652 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4653 tree index = TREE_PURPOSE (elt);
4654 rtx xtarget = target;
4656 if (cleared && is_zeros_p (value))
4657 continue;
4659 unsignedp = TREE_UNSIGNED (elttype);
4660 mode = TYPE_MODE (elttype);
4661 if (mode == BLKmode)
4662 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4663 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4664 : -1);
4665 else
4666 bitsize = GET_MODE_BITSIZE (mode);
4668 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4670 tree lo_index = TREE_OPERAND (index, 0);
4671 tree hi_index = TREE_OPERAND (index, 1);
4672 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4673 struct nesting *loop;
4674 HOST_WIDE_INT lo, hi, count;
4675 tree position;
4677 /* If the range is constant and "small", unroll the loop. */
4678 if (const_bounds_p
4679 && host_integerp (lo_index, 0)
4680 && host_integerp (hi_index, 0)
4681 && (lo = tree_low_cst (lo_index, 0),
4682 hi = tree_low_cst (hi_index, 0),
4683 count = hi - lo + 1,
4684 (GET_CODE (target) != MEM
4685 || count <= 2
4686 || (host_integerp (TYPE_SIZE (elttype), 1)
4687 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4688 <= 40 * 8)))))
4690 lo -= minelt; hi -= minelt;
4691 for (; lo <= hi; lo++)
4693 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4694 store_constructor_field
4695 (target, bitsize, bitpos, mode, value, type, align,
4696 cleared,
4697 TYPE_NONALIASED_COMPONENT (type)
4698 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4701 else
4703 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4704 loop_top = gen_label_rtx ();
4705 loop_end = gen_label_rtx ();
4707 unsignedp = TREE_UNSIGNED (domain);
4709 index = build_decl (VAR_DECL, NULL_TREE, domain);
4711 DECL_RTL (index) = index_r
4712 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4713 &unsignedp, 0));
4715 if (TREE_CODE (value) == SAVE_EXPR
4716 && SAVE_EXPR_RTL (value) == 0)
4718 /* Make sure value gets expanded once before the
4719 loop. */
4720 expand_expr (value, const0_rtx, VOIDmode, 0);
4721 emit_queue ();
4723 store_expr (lo_index, index_r, 0);
4724 loop = expand_start_loop (0);
4726 /* Assign value to element index. */
4727 position
4728 = convert (ssizetype,
4729 fold (build (MINUS_EXPR, TREE_TYPE (index),
4730 index, TYPE_MIN_VALUE (domain))));
4731 position = size_binop (MULT_EXPR, position,
4732 convert (ssizetype,
4733 TYPE_SIZE_UNIT (elttype)));
4735 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4736 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4737 xtarget = change_address (target, mode, addr);
4738 if (TREE_CODE (value) == CONSTRUCTOR)
4739 store_constructor (value, xtarget, align, cleared,
4740 bitsize / BITS_PER_UNIT);
4741 else
4742 store_expr (value, xtarget, 0);
4744 expand_exit_loop_if_false (loop,
4745 build (LT_EXPR, integer_type_node,
4746 index, hi_index));
4748 expand_increment (build (PREINCREMENT_EXPR,
4749 TREE_TYPE (index),
4750 index, integer_one_node), 0, 0);
4751 expand_end_loop ();
4752 emit_label (loop_end);
4755 else if ((index != 0 && ! host_integerp (index, 0))
4756 || ! host_integerp (TYPE_SIZE (elttype), 1))
4758 rtx pos_rtx, addr;
4759 tree position;
4761 if (index == 0)
4762 index = ssize_int (1);
4764 if (minelt)
4765 index = convert (ssizetype,
4766 fold (build (MINUS_EXPR, index,
4767 TYPE_MIN_VALUE (domain))));
4769 position = size_binop (MULT_EXPR, index,
4770 convert (ssizetype,
4771 TYPE_SIZE_UNIT (elttype)));
4772 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4773 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4774 xtarget = change_address (target, mode, addr);
4775 store_expr (value, xtarget, 0);
4777 else
4779 if (index != 0)
4780 bitpos = ((tree_low_cst (index, 0) - minelt)
4781 * tree_low_cst (TYPE_SIZE (elttype), 1));
4782 else
4783 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4785 store_constructor_field (target, bitsize, bitpos, mode, value,
4786 type, align, cleared,
4787 TYPE_NONALIASED_COMPONENT (type)
4788 ? MEM_ALIAS_SET (target) :
4789 get_alias_set (elttype));
4795 /* Set constructor assignments. */
4796 else if (TREE_CODE (type) == SET_TYPE)
4798 tree elt = CONSTRUCTOR_ELTS (exp);
4799 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4800 tree domain = TYPE_DOMAIN (type);
4801 tree domain_min, domain_max, bitlength;
4803 /* The default implementation strategy is to extract the constant
4804 parts of the constructor, use that to initialize the target,
4805 and then "or" in whatever non-constant ranges we need in addition.
4807 If a large set is all zero or all ones, it is
4808 probably better to set it using memset (if available) or bzero.
4809 Also, if a large set has just a single range, it may also be
4810 better to first clear the whole set (using
4811 bzero/memset), and then set the bits we want. */
4813 /* Check for all zeros. */
4814 if (elt == NULL_TREE && size > 0)
4816 if (!cleared)
4817 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4818 return;
4821 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4822 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4823 bitlength = size_binop (PLUS_EXPR,
4824 size_diffop (domain_max, domain_min),
4825 ssize_int (1));
4827 nbits = tree_low_cst (bitlength, 1);
4829 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4830 are "complicated" (more than one range), initialize (the
4831 constant parts) by copying from a constant. */
4832 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4833 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4835 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4836 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4837 char *bit_buffer = (char *) alloca (nbits);
4838 HOST_WIDE_INT word = 0;
4839 unsigned int bit_pos = 0;
4840 unsigned int ibit = 0;
4841 unsigned int offset = 0; /* In bytes from beginning of set. */
4843 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4844 for (;;)
4846 if (bit_buffer[ibit])
4848 if (BYTES_BIG_ENDIAN)
4849 word |= (1 << (set_word_size - 1 - bit_pos));
4850 else
4851 word |= 1 << bit_pos;
4854 bit_pos++; ibit++;
4855 if (bit_pos >= set_word_size || ibit == nbits)
4857 if (word != 0 || ! cleared)
4859 rtx datum = GEN_INT (word);
4860 rtx to_rtx;
4862 /* The assumption here is that it is safe to use
4863 XEXP if the set is multi-word, but not if
4864 it's single-word. */
4865 if (GET_CODE (target) == MEM)
4867 to_rtx = plus_constant (XEXP (target, 0), offset);
4868 to_rtx = change_address (target, mode, to_rtx);
4870 else if (offset == 0)
4871 to_rtx = target;
4872 else
4873 abort ();
4874 emit_move_insn (to_rtx, datum);
4877 if (ibit == nbits)
4878 break;
4879 word = 0;
4880 bit_pos = 0;
4881 offset += set_word_size / BITS_PER_UNIT;
4885 else if (!cleared)
4886 /* Don't bother clearing storage if the set is all ones. */
4887 if (TREE_CHAIN (elt) != NULL_TREE
4888 || (TREE_PURPOSE (elt) == NULL_TREE
4889 ? nbits != 1
4890 : ( ! host_integerp (TREE_VALUE (elt), 0)
4891 || ! host_integerp (TREE_PURPOSE (elt), 0)
4892 || (tree_low_cst (TREE_VALUE (elt), 0)
4893 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4894 != (HOST_WIDE_INT) nbits))))
4895 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4897 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4899 /* Start of range of element or NULL. */
4900 tree startbit = TREE_PURPOSE (elt);
4901 /* End of range of element, or element value. */
4902 tree endbit = TREE_VALUE (elt);
4903 #ifdef TARGET_MEM_FUNCTIONS
4904 HOST_WIDE_INT startb, endb;
4905 #endif
4906 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4908 bitlength_rtx = expand_expr (bitlength,
4909 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4911 /* Handle non-range tuple element like [ expr ]. */
4912 if (startbit == NULL_TREE)
4914 startbit = save_expr (endbit);
4915 endbit = startbit;
4918 startbit = convert (sizetype, startbit);
4919 endbit = convert (sizetype, endbit);
4920 if (! integer_zerop (domain_min))
4922 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4923 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4925 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4926 EXPAND_CONST_ADDRESS);
4927 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4928 EXPAND_CONST_ADDRESS);
4930 if (REG_P (target))
4932 targetx
4933 = assign_temp
4934 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4935 TYPE_QUAL_CONST)),
4936 0, 1, 1);
4937 emit_move_insn (targetx, target);
4940 else if (GET_CODE (target) == MEM)
4941 targetx = target;
4942 else
4943 abort ();
4945 #ifdef TARGET_MEM_FUNCTIONS
4946 /* Optimization: If startbit and endbit are
4947 constants divisible by BITS_PER_UNIT,
4948 call memset instead. */
4949 if (TREE_CODE (startbit) == INTEGER_CST
4950 && TREE_CODE (endbit) == INTEGER_CST
4951 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4952 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4954 emit_library_call (memset_libfunc, LCT_NORMAL,
4955 VOIDmode, 3,
4956 plus_constant (XEXP (targetx, 0),
4957 startb / BITS_PER_UNIT),
4958 Pmode,
4959 constm1_rtx, TYPE_MODE (integer_type_node),
4960 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4961 TYPE_MODE (sizetype));
4963 else
4964 #endif
4965 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4966 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4967 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4968 startbit_rtx, TYPE_MODE (sizetype),
4969 endbit_rtx, TYPE_MODE (sizetype));
4971 if (REG_P (target))
4972 emit_move_insn (target, targetx);
4976 else
4977 abort ();
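/* Illustrative sketch (editorial addition; not from the original file): how
   the clear-then-store strategy above plays out.  For an automatic
   `int v[100] = { 1 };' the CONSTRUCTOR has one element, fewer than the
   array, so need_to_clear is set, clear_storage wipes all of TARGET, and a
   single store is emitted for element 0; the other 99 elements are never
   stored individually.  The caller (expand_expr's CONSTRUCTOR case) invokes
   this function roughly as below; EXP and TARGET are assumed to have been
   set up there.  */
#if 0
  store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)),
		     /* cleared */ 0, int_size_in_bytes (TREE_TYPE (exp)));
#endif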
4980 /* Store the value of EXP (an expression tree)
4981 into a subfield of TARGET which has mode MODE and occupies
4982 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4983 If MODE is VOIDmode, it means that we are storing into a bit-field.
4985 If VALUE_MODE is VOIDmode, return nothing in particular.
4986 UNSIGNEDP is not used in this case.
4988 Otherwise, return an rtx for the value stored. This rtx
4989 has mode VALUE_MODE if that is convenient to do.
4990 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4992 ALIGN is the alignment that TARGET is known to have.
4993 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4995 ALIAS_SET is the alias set for the destination. This value will
4996 (in general) be different from that for TARGET, since TARGET is a
4997 reference to the containing structure. */
4999 static rtx
5000 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5001 unsignedp, align, total_size, alias_set)
5002 rtx target;
5003 HOST_WIDE_INT bitsize;
5004 HOST_WIDE_INT bitpos;
5005 enum machine_mode mode;
5006 tree exp;
5007 enum machine_mode value_mode;
5008 int unsignedp;
5009 unsigned int align;
5010 HOST_WIDE_INT total_size;
5011 int alias_set;
5013 HOST_WIDE_INT width_mask = 0;
5015 if (TREE_CODE (exp) == ERROR_MARK)
5016 return const0_rtx;
5018 if (bitsize < HOST_BITS_PER_WIDE_INT)
5019 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5021 /* If we are storing into an unaligned field of an aligned union that is
5022 in a register, we may have the mode of TARGET being an integer mode but
5023 MODE == BLKmode. In that case, get an aligned object whose size and
5024 alignment are the same as TARGET and store TARGET into it (we can avoid
5025 the store if the field being stored is the entire width of TARGET). Then
5026 call ourselves recursively to store the field into a BLKmode version of
5027 that object. Finally, load from the object into TARGET. This is not
5028 very efficient in general, but should only be slightly more expensive
5029 than the otherwise-required unaligned accesses. Perhaps this can be
5030 cleaned up later. */
5032 if (mode == BLKmode
5033 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5035 rtx object
5036 = assign_temp
5037 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5038 TYPE_QUAL_CONST),
5039 0, 1, 1);
5040 rtx blk_object = copy_rtx (object);
5042 PUT_MODE (blk_object, BLKmode);
5044 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5045 emit_move_insn (object, target);
5047 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5048 align, total_size, alias_set);
5050 /* Even though we aren't returning target, we need to
5051 give it the updated value. */
5052 emit_move_insn (target, object);
5054 return blk_object;
5057 if (GET_CODE (target) == CONCAT)
5059 /* We're storing into a struct containing a single __complex. */
5061 if (bitpos != 0)
5062 abort ();
5063 return store_expr (exp, target, 0);
5066 /* If the structure is in a register or if the component
5067 is a bit field, we cannot use addressing to access it.
5068 Use bit-field techniques or SUBREG to store in it. */
5070 if (mode == VOIDmode
5071 || (mode != BLKmode && ! direct_store[(int) mode]
5072 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5073 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5074 || GET_CODE (target) == REG
5075 || GET_CODE (target) == SUBREG
5076 /* If the field isn't aligned enough to store as an ordinary memref,
5077 store it as a bit field. */
5078 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5079 && (align < GET_MODE_ALIGNMENT (mode)
5080 || bitpos % GET_MODE_ALIGNMENT (mode)))
5081 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5082 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5083 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5084 /* If the RHS and field are a constant size and the size of the
5085 RHS isn't the same size as the bitfield, we must use bitfield
5086 operations. */
5087 || (bitsize >= 0
5088 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5089 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5091 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5093 /* If BITSIZE is narrower than the size of the type of EXP
5094 we will be narrowing TEMP. Normally, what's wanted are the
5095 low-order bits. However, if EXP's type is a record and this is a
5096 big-endian machine, we want the upper BITSIZE bits. */
5097 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5098 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5099 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5100 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5101 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5102 - bitsize),
5103 temp, 1);
5105 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5106 MODE. */
5107 if (mode != VOIDmode && mode != BLKmode
5108 && mode != TYPE_MODE (TREE_TYPE (exp)))
5109 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5111 /* If the modes of TARGET and TEMP are both BLKmode, both
5112 must be in memory and BITPOS must be aligned on a byte
5113 boundary. If so, we simply do a block copy. */
5114 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5116 unsigned int exp_align = expr_align (exp);
5118 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5119 || bitpos % BITS_PER_UNIT != 0)
5120 abort ();
5122 target = change_address (target, VOIDmode,
5123 plus_constant (XEXP (target, 0),
5124 bitpos / BITS_PER_UNIT));
5126 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5127 align = MIN (exp_align, align);
5129 /* Find an alignment that is consistent with the bit position. */
5130 while ((bitpos % align) != 0)
5131 align >>= 1;
5133 emit_block_move (target, temp,
5134 bitsize == -1 ? expr_size (exp)
5135 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5136 / BITS_PER_UNIT),
5137 align);
5139 return value_mode == VOIDmode ? const0_rtx : target;
5142 /* Store the value in the bitfield. */
5143 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5144 if (value_mode != VOIDmode)
5146 /* The caller wants an rtx for the value. */
5147 /* If possible, avoid refetching from the bitfield itself. */
5148 if (width_mask != 0
5149 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5151 tree count;
5152 enum machine_mode tmode;
5154 if (unsignedp)
5155 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5156 tmode = GET_MODE (temp);
5157 if (tmode == VOIDmode)
5158 tmode = value_mode;
5159 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5160 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5161 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5163 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5164 NULL_RTX, value_mode, 0, align,
5165 total_size);
5167 return const0_rtx;
5169 else
5171 rtx addr = XEXP (target, 0);
5172 rtx to_rtx;
5174 /* If a value is wanted, it must be the lhs;
5175 so make the address stable for multiple use. */
5177 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5178 && ! CONSTANT_ADDRESS_P (addr)
5179 /* A frame-pointer reference is already stable. */
5180 && ! (GET_CODE (addr) == PLUS
5181 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5182 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5183 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5184 addr = copy_to_reg (addr);
5186 /* Now build a reference to just the desired component. */
5188 to_rtx = copy_rtx (change_address (target, mode,
5189 plus_constant (addr,
5190 (bitpos
5191 / BITS_PER_UNIT))));
5192 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5193 MEM_ALIAS_SET (to_rtx) = alias_set;
5195 return store_expr (exp, to_rtx, value_mode != VOIDmode);
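/* Illustrative sketch (editorial addition, hypothetical names; not from the
   original file): how a bit-field assignment reaches store_field.  For
   `struct s { int f : 3; } x;' the store `x.f = v' arrives with
   MODE == VOIDmode (a bit-field), BITSIZE == 3 and BITPOS == 0, so the
   store_bit_field path above is used.  A call from expand_assignment looks
   roughly like this; every argument other than the literals is assumed to
   come from get_inner_reference and expand_expr.  */
#if 0
  result = store_field (to_rtx, /* bitsize */ 3, /* bitpos */ 0,
			/* mode */ VOIDmode, from,
			want_value ? TYPE_MODE (TREE_TYPE (to)) : VOIDmode,
			unsignedp, alignment,
			int_size_in_bytes (TREE_TYPE (containing_object)),
			get_alias_set (to));
#endif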
5199 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5200 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5201 ARRAY_REFs and find the ultimate containing object, which we return.
5203 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5204 bit position, and *PUNSIGNEDP to the signedness of the field.
5205 If the position of the field is variable, we store a tree
5206 giving the variable offset (in units) in *POFFSET.
5207 This offset is in addition to the bit position.
5208 If the position is not variable, we store 0 in *POFFSET.
5209 We set *PALIGNMENT to the alignment of the address that will be
5210 computed. This is the alignment of the thing we return if *POFFSET
5211 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5213 If any of the extraction expressions is volatile,
5214 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5216 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5217 is a mode that can be used to access the field. In that case, *PBITSIZE
5218 is redundant.
5220 If the field describes a variable-sized object, *PMODE is set to
5221 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5222 this case, but the address of the object can be found. */
5224 tree
5225 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5226 punsignedp, pvolatilep, palignment)
5227 tree exp;
5228 HOST_WIDE_INT *pbitsize;
5229 HOST_WIDE_INT *pbitpos;
5230 tree *poffset;
5231 enum machine_mode *pmode;
5232 int *punsignedp;
5233 int *pvolatilep;
5234 unsigned int *palignment;
5236 tree size_tree = 0;
5237 enum machine_mode mode = VOIDmode;
5238 tree offset = size_zero_node;
5239 tree bit_offset = bitsize_zero_node;
5240 unsigned int alignment = BIGGEST_ALIGNMENT;
5241 tree tem;
5243 /* First get the mode, signedness, and size. We do this from just the
5244 outermost expression. */
5245 if (TREE_CODE (exp) == COMPONENT_REF)
5247 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5248 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5249 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5251 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5253 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5255 size_tree = TREE_OPERAND (exp, 1);
5256 *punsignedp = TREE_UNSIGNED (exp);
5258 else
5260 mode = TYPE_MODE (TREE_TYPE (exp));
5261 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5263 if (mode == BLKmode)
5264 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5265 else
5266 *pbitsize = GET_MODE_BITSIZE (mode);
5269 if (size_tree != 0)
5271 if (! host_integerp (size_tree, 1))
5272 mode = BLKmode, *pbitsize = -1;
5273 else
5274 *pbitsize = tree_low_cst (size_tree, 1);
5277 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5278 and find the ultimate containing object. */
5279 while (1)
5281 if (TREE_CODE (exp) == BIT_FIELD_REF)
5282 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5283 else if (TREE_CODE (exp) == COMPONENT_REF)
5285 tree field = TREE_OPERAND (exp, 1);
5286 tree this_offset = DECL_FIELD_OFFSET (field);
5288 /* If this field hasn't been filled in yet, don't go
5289 past it. This should only happen when folding expressions
5290 made during type construction. */
5291 if (this_offset == 0)
5292 break;
5293 else if (! TREE_CONSTANT (this_offset)
5294 && contains_placeholder_p (this_offset))
5295 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5297 offset = size_binop (PLUS_EXPR, offset, this_offset);
5298 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5299 DECL_FIELD_BIT_OFFSET (field));
5301 if (! host_integerp (offset, 0))
5302 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5305 else if (TREE_CODE (exp) == ARRAY_REF)
5307 tree index = TREE_OPERAND (exp, 1);
5308 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5309 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5310 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5312 /* We assume all arrays have sizes that are a multiple of a byte.
5313 First subtract the lower bound, if any, in the type of the
5314 index, then convert to sizetype and multiply by the size of the
5315 array element. */
5316 if (low_bound != 0 && ! integer_zerop (low_bound))
5317 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5318 index, low_bound));
5320 /* If the index has a self-referential type, pass it to a
5321 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5322 component to one. */
5323 if (! TREE_CONSTANT (index)
5324 && contains_placeholder_p (index))
5325 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5326 if (! TREE_CONSTANT (unit_size)
5327 && contains_placeholder_p (unit_size))
5328 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5329 TREE_OPERAND (exp, 0));
5331 offset = size_binop (PLUS_EXPR, offset,
5332 size_binop (MULT_EXPR,
5333 convert (sizetype, index),
5334 unit_size));
5337 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5338 && ! ((TREE_CODE (exp) == NOP_EXPR
5339 || TREE_CODE (exp) == CONVERT_EXPR)
5340 && (TYPE_MODE (TREE_TYPE (exp))
5341 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5342 break;
5344 /* If any reference in the chain is volatile, the effect is volatile. */
5345 if (TREE_THIS_VOLATILE (exp))
5346 *pvolatilep = 1;
5348 /* If the offset is non-constant already, then we can't assume any
5349 alignment more than the alignment here. */
5350 if (! TREE_CONSTANT (offset))
5351 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5353 exp = TREE_OPERAND (exp, 0);
5356 if (DECL_P (exp))
5357 alignment = MIN (alignment, DECL_ALIGN (exp));
5358 else if (TREE_TYPE (exp) != 0)
5359 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5361 /* If OFFSET is constant, see if we can return the whole thing as a
5362 constant bit position. Otherwise, split it up. */
5363 if (host_integerp (offset, 0)
5364 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5365 bitsize_unit_node))
5366 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5367 && host_integerp (tem, 0))
5368 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5369 else
5370 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5372 *pmode = mode;
5373 *palignment = alignment;
5374 return exp;
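/* Editorial worked example (not from the original file): what
   get_inner_reference computes for a simple nested reference.  Assuming
   32-bit int and 16-bit short, for `struct s { int pad; short f[4]; } x;'
   and the expression `x.f[2]' it returns the VAR_DECL for `x' with
   *PBITSIZE == 16, *PBITPOS == 32 + 2 * 16 == 64, *POFFSET == 0 (the whole
   position is constant), *PMODE == HImode and *PUNSIGNEDP == 0.  With a
   variable index, the variable part of the position would come back in
   *POFFSET as a tree instead.  A typical call (hypothetical locals) is:  */
#if 0
  tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
			     &unsignedp, &volatilep, &alignment);
#endif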
5377 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5379 static enum memory_use_mode
5380 get_memory_usage_from_modifier (modifier)
5381 enum expand_modifier modifier;
5383 switch (modifier)
5385 case EXPAND_NORMAL:
5386 case EXPAND_SUM:
5387 return MEMORY_USE_RO;
5388 break;
5389 case EXPAND_MEMORY_USE_WO:
5390 return MEMORY_USE_WO;
5391 break;
5392 case EXPAND_MEMORY_USE_RW:
5393 return MEMORY_USE_RW;
5394 break;
5395 case EXPAND_MEMORY_USE_DONT:
5396 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5397 MEMORY_USE_DONT, because they are modifiers to a call of
5398 expand_expr in the ADDR_EXPR case of expand_expr. */
5399 case EXPAND_CONST_ADDRESS:
5400 case EXPAND_INITIALIZER:
5401 return MEMORY_USE_DONT;
5402 case EXPAND_MEMORY_USE_BAD:
5403 default:
5404 abort ();
5408 /* Given an rtx VALUE that may contain additions and multiplications,
5409 return an equivalent value that just refers to a register or memory.
5410 This is done by generating instructions to perform the arithmetic
5411 and returning a pseudo-register containing the value.
5413 The returned value may be a REG, SUBREG, MEM or constant. */
5415 rtx
5416 force_operand (value, target)
5417 rtx value, target;
5419 register optab binoptab = 0;
5420 /* Use a temporary to force order of execution of calls to
5421 `force_operand'. */
5422 rtx tmp;
5423 register rtx op2;
5424 /* Use subtarget as the target for operand 0 of a binary operation. */
5425 register rtx subtarget = get_subtarget (target);
5427 /* Check for a PIC address load. */
5428 if (flag_pic
5429 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5430 && XEXP (value, 0) == pic_offset_table_rtx
5431 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5432 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5433 || GET_CODE (XEXP (value, 1)) == CONST))
5435 if (!subtarget)
5436 subtarget = gen_reg_rtx (GET_MODE (value));
5437 emit_move_insn (subtarget, value);
5438 return subtarget;
5441 if (GET_CODE (value) == PLUS)
5442 binoptab = add_optab;
5443 else if (GET_CODE (value) == MINUS)
5444 binoptab = sub_optab;
5445 else if (GET_CODE (value) == MULT)
5447 op2 = XEXP (value, 1);
5448 if (!CONSTANT_P (op2)
5449 && !(GET_CODE (op2) == REG && op2 != subtarget))
5450 subtarget = 0;
5451 tmp = force_operand (XEXP (value, 0), subtarget);
5452 return expand_mult (GET_MODE (value), tmp,
5453 force_operand (op2, NULL_RTX),
5454 target, 1);
5457 if (binoptab)
5459 op2 = XEXP (value, 1);
5460 if (!CONSTANT_P (op2)
5461 && !(GET_CODE (op2) == REG && op2 != subtarget))
5462 subtarget = 0;
5463 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5465 binoptab = add_optab;
5466 op2 = negate_rtx (GET_MODE (value), op2);
5469 /* Check for an addition with OP2 a constant integer and our first
5470 operand a PLUS of a virtual register and something else. In that
5471 case, we want to emit the sum of the virtual register and the
5472 constant first and then add the other value. This allows virtual
5473 register instantiation to simply modify the constant rather than
5474 creating another one around this addition. */
5475 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5476 && GET_CODE (XEXP (value, 0)) == PLUS
5477 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5478 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5479 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5481 rtx temp = expand_binop (GET_MODE (value), binoptab,
5482 XEXP (XEXP (value, 0), 0), op2,
5483 subtarget, 0, OPTAB_LIB_WIDEN);
5484 return expand_binop (GET_MODE (value), binoptab, temp,
5485 force_operand (XEXP (XEXP (value, 0), 1), 0),
5486 target, 0, OPTAB_LIB_WIDEN);
5489 tmp = force_operand (XEXP (value, 0), subtarget);
5490 return expand_binop (GET_MODE (value), binoptab, tmp,
5491 force_operand (op2, NULL_RTX),
5492 target, 0, OPTAB_LIB_WIDEN);
5493 /* We give UNSIGNEDP = 0 to expand_binop
5494 because the only operations we are expanding here are signed ones. */
5496 return value;
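/* Illustrative sketch (editorial addition, hypothetical registers; not from
   the original file): force_operand turns compound address arithmetic into
   something a recognizer will accept.  Given a VALUE such as
   (plus:SI (mult:SI (reg:SI 58) (const_int 4)) (reg:SI 59)),
   the code above emits the multiplication (possibly as a shift) and the
   addition, and returns a register or other simple operand holding the sum.
   INDEX_REG and BASE_REG below stand for pseudo registers set up by the
   caller.  */
#if 0
  rtx addr = force_operand (gen_rtx_PLUS (SImode,
					  gen_rtx_MULT (SImode, index_reg,
							GEN_INT (4)),
					  base_reg),
			    NULL_RTX);
#endif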
5499 /* Subroutine of expand_expr:
5500 save the non-copied parts (LIST) of an expr (LHS), and return a list
5501 which can restore these values to their previous values,
5502 should something modify their storage. */
5504 static tree
5505 save_noncopied_parts (lhs, list)
5506 tree lhs;
5507 tree list;
5509 tree tail;
5510 tree parts = 0;
5512 for (tail = list; tail; tail = TREE_CHAIN (tail))
5513 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5514 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5515 else
5517 tree part = TREE_VALUE (tail);
5518 tree part_type = TREE_TYPE (part);
5519 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5520 rtx target
5521 = assign_temp (build_qualified_type (part_type,
5522 (TYPE_QUALS (part_type)
5523 | TYPE_QUAL_CONST)),
5524 0, 1, 1);
5526 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5527 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5528 parts = tree_cons (to_be_saved,
5529 build (RTL_EXPR, part_type, NULL_TREE,
5530 (tree) target),
5531 parts);
5532 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5534 return parts;
5537 /* Subroutine of expand_expr:
5538 record the non-copied parts (LIST) of an expr (LHS), and return a list
5539 which specifies the initial values of these parts. */
5541 static tree
5542 init_noncopied_parts (lhs, list)
5543 tree lhs;
5544 tree list;
5546 tree tail;
5547 tree parts = 0;
5549 for (tail = list; tail; tail = TREE_CHAIN (tail))
5550 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5551 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5552 else if (TREE_PURPOSE (tail))
5554 tree part = TREE_VALUE (tail);
5555 tree part_type = TREE_TYPE (part);
5556 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5557 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5559 return parts;
5562 /* Subroutine of expand_expr: return nonzero iff there is no way that
5563 EXP can reference X, which is being modified. TOP_P is nonzero if this
5564 call is going to be used to determine whether we need a temporary
5565 for EXP, as opposed to a recursive call to this function.
5567 It is always safe for this routine to return zero since it merely
5568 searches for optimization opportunities. */
5570 int
5571 safe_from_p (x, exp, top_p)
5572 rtx x;
5573 tree exp;
5574 int top_p;
5576 rtx exp_rtl = 0;
5577 int i, nops;
5578 static tree save_expr_list;
5580 if (x == 0
5581 /* If EXP has varying size, we MUST use a target since we currently
5582 have no way of allocating temporaries of variable size
5583 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5584 So we assume here that something at a higher level has prevented a
5585 clash. This is somewhat bogus, but the best we can do. Only
5586 do this when X is BLKmode and when we are at the top level. */
5587 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5588 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5589 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5590 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5591 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5592 != INTEGER_CST)
5593 && GET_MODE (x) == BLKmode)
5594 /* If X is in the outgoing argument area, it is always safe. */
5595 || (GET_CODE (x) == MEM
5596 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5597 || (GET_CODE (XEXP (x, 0)) == PLUS
5598 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5599 return 1;
5601 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5602 find the underlying pseudo. */
5603 if (GET_CODE (x) == SUBREG)
5605 x = SUBREG_REG (x);
5606 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5607 return 0;
5610 /* A SAVE_EXPR might appear many times in the expression passed to the
5611 top-level safe_from_p call, and if it has a complex subexpression,
5612 examining it multiple times could result in a combinatorial explosion.
5613 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5614 with optimization took about 28 minutes to compile -- even though it was
5615 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5616 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5617 we have processed. Note that the only test of top_p was above. */
5619 if (top_p)
5621 int rtn;
5622 tree t;
5624 save_expr_list = 0;
5626 rtn = safe_from_p (x, exp, 0);
5628 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5629 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5631 return rtn;
5634 /* Now look at our tree code and possibly recurse. */
5635 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5637 case 'd':
5638 exp_rtl = DECL_RTL (exp);
5639 break;
5641 case 'c':
5642 return 1;
5644 case 'x':
5645 if (TREE_CODE (exp) == TREE_LIST)
5646 return ((TREE_VALUE (exp) == 0
5647 || safe_from_p (x, TREE_VALUE (exp), 0))
5648 && (TREE_CHAIN (exp) == 0
5649 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5650 else if (TREE_CODE (exp) == ERROR_MARK)
5651 return 1; /* An already-visited SAVE_EXPR? */
5652 else
5653 return 0;
5655 case '1':
5656 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5658 case '2':
5659 case '<':
5660 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5661 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5663 case 'e':
5664 case 'r':
5665 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5666 the expression. If it is set, we conflict iff we are that rtx or
5667 both are in memory. Otherwise, we check all operands of the
5668 expression recursively. */
5670 switch (TREE_CODE (exp))
5672 case ADDR_EXPR:
5673 return (staticp (TREE_OPERAND (exp, 0))
5674 || TREE_STATIC (exp)
5675 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5677 case INDIRECT_REF:
5678 if (GET_CODE (x) == MEM
5679 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5680 get_alias_set (exp)))
5681 return 0;
5682 break;
5684 case CALL_EXPR:
5685 /* Assume that the call will clobber all hard registers and
5686 all of memory. */
5687 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5688 || GET_CODE (x) == MEM)
5689 return 0;
5690 break;
5692 case RTL_EXPR:
5693 /* If a sequence exists, we would have to scan every instruction
5694 in the sequence to see if it was safe. This is probably not
5695 worthwhile. */
5696 if (RTL_EXPR_SEQUENCE (exp))
5697 return 0;
5699 exp_rtl = RTL_EXPR_RTL (exp);
5700 break;
5702 case WITH_CLEANUP_EXPR:
5703 exp_rtl = RTL_EXPR_RTL (exp);
5704 break;
5706 case CLEANUP_POINT_EXPR:
5707 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5709 case SAVE_EXPR:
5710 exp_rtl = SAVE_EXPR_RTL (exp);
5711 if (exp_rtl)
5712 break;
5714 /* If we've already scanned this, don't do it again. Otherwise,
5715 show we've scanned it and record it for clearing the flag if we're
5716 going on. */
5717 if (TREE_PRIVATE (exp))
5718 return 1;
5720 TREE_PRIVATE (exp) = 1;
5721 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5723 TREE_PRIVATE (exp) = 0;
5724 return 0;
5727 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5728 return 1;
5730 case BIND_EXPR:
5731 /* The only operand we look at is operand 1. The rest aren't
5732 part of the expression. */
5733 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5735 case METHOD_CALL_EXPR:
5736 /* This takes an rtx argument, but shouldn't appear here. */
5737 abort ();
5739 default:
5740 break;
5743 /* If we have an rtx, we do not need to scan our operands. */
5744 if (exp_rtl)
5745 break;
5747 nops = first_rtl_op (TREE_CODE (exp));
5748 for (i = 0; i < nops; i++)
5749 if (TREE_OPERAND (exp, i) != 0
5750 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5751 return 0;
5753 /* If this is a language-specific tree code, it may require
5754 special handling. */
5755 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5756 && lang_safe_from_p
5757 && !(*lang_safe_from_p) (x, exp))
5758 return 0;
5761 /* If we have an rtl, find any enclosed object. Then see if we conflict
5762 with it. */
5763 if (exp_rtl)
5765 if (GET_CODE (exp_rtl) == SUBREG)
5767 exp_rtl = SUBREG_REG (exp_rtl);
5768 if (GET_CODE (exp_rtl) == REG
5769 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5770 return 0;
5773 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5774 are memory and they conflict. */
5775 return ! (rtx_equal_p (x, exp_rtl)
5776 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5777 && true_dependence (exp_rtl, GET_MODE (x), x,
5778 rtx_addr_varies_p)));
5781 /* If we reach here, it is safe. */
5782 return 1;
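/* A hedged usage sketch for safe_from_p (not part of the original expr.c,
   not compiled): a caller reuses TARGET only when expanding EXP cannot
   clobber it, similar to what the CONSTRUCTOR case of expand_expr does
   further below.  `exp', `target' and `mode' are assumed to be in scope.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp, 1))
    /* Expanding EXP might modify TARGET, so use a fresh register.  */
    target = gen_reg_rtx (mode);
#endif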
5785 /* Subroutine of expand_expr: return nonzero iff EXP is an
5786 expression whose type is statically determinable. */
5788 static int
5789 fixed_type_p (exp)
5790 tree exp;
5792 if (TREE_CODE (exp) == PARM_DECL
5793 || TREE_CODE (exp) == VAR_DECL
5794 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5795 || TREE_CODE (exp) == COMPONENT_REF
5796 || TREE_CODE (exp) == ARRAY_REF)
5797 return 1;
5798 return 0;
5801 /* Subroutine of expand_expr: return rtx if EXP is a
5802 variable or parameter; else return 0. */
5804 static rtx
5805 var_rtx (exp)
5806 tree exp;
5808 STRIP_NOPS (exp);
5809 switch (TREE_CODE (exp))
5811 case PARM_DECL:
5812 case VAR_DECL:
5813 return DECL_RTL (exp);
5814 default:
5815 return 0;
5819 #ifdef MAX_INTEGER_COMPUTATION_MODE
5820 void
5821 check_max_integer_computation_mode (exp)
5822 tree exp;
5824 enum tree_code code;
5825 enum machine_mode mode;
5827 /* Strip any NOPs that don't change the mode. */
5828 STRIP_NOPS (exp);
5829 code = TREE_CODE (exp);
5831 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5832 if (code == NOP_EXPR
5833 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5834 return;
5836 /* First check the type of the overall operation. We need only look at
5837 unary, binary and relational operations. */
5838 if (TREE_CODE_CLASS (code) == '1'
5839 || TREE_CODE_CLASS (code) == '2'
5840 || TREE_CODE_CLASS (code) == '<')
5842 mode = TYPE_MODE (TREE_TYPE (exp));
5843 if (GET_MODE_CLASS (mode) == MODE_INT
5844 && mode > MAX_INTEGER_COMPUTATION_MODE)
5845 fatal ("unsupported wide integer operation");
5848 /* Check operand of a unary op. */
5849 if (TREE_CODE_CLASS (code) == '1')
5851 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5852 if (GET_MODE_CLASS (mode) == MODE_INT
5853 && mode > MAX_INTEGER_COMPUTATION_MODE)
5854 fatal ("unsupported wide integer operation");
5857 /* Check operands of a binary/comparison op. */
5858 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5860 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5861 if (GET_MODE_CLASS (mode) == MODE_INT
5862 && mode > MAX_INTEGER_COMPUTATION_MODE)
5863 fatal ("unsupported wide integer operation");
5865 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5866 if (GET_MODE_CLASS (mode) == MODE_INT
5867 && mode > MAX_INTEGER_COMPUTATION_MODE)
5868 fatal ("unsupported wide integer operation");
5871 #endif
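/* A hedged illustration (not part of the original expr.c): the check above
   is compiled only when a target defines MAX_INTEGER_COMPUTATION_MODE.  A
   hypothetical port whose widest supported integer arithmetic is 32 bits
   might define, in its target header,

	#define MAX_INTEGER_COMPUTATION_MODE SImode

   after which any wider integer operation reaching the expander triggers
   the "unsupported wide integer operation" error above.  */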
5873 /* expand_expr: generate code for computing expression EXP.
5874 An rtx for the computed value is returned. The value is never null.
5875 In the case of a void EXP, const0_rtx is returned.
5877 The value may be stored in TARGET if TARGET is nonzero.
5878 TARGET is just a suggestion; callers must assume that
5879 the rtx returned may not be the same as TARGET.
5881 If TARGET is CONST0_RTX, it means that the value will be ignored.
5883 If TMODE is not VOIDmode, it suggests generating the
5884 result in mode TMODE. But this is done only when convenient.
5885 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5886 TMODE is just a suggestion; callers must assume that
5887 the rtx returned may not have mode TMODE.
5889 Note that TARGET may have neither TMODE nor MODE. In that case, it
5890 probably will not be used.
5892 If MODIFIER is EXPAND_SUM then when EXP is an addition
5893 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5894 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5895 products as above, or REG or MEM, or constant.
5896 Ordinarily in such cases we would output mul or add instructions
5897 and then return a pseudo reg containing the sum.
5899 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5900 it also marks a label as absolutely required (it can't be dead).
5901 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5902 This is used for outputting expressions used in initializers.
5904 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5905 with a constant address even if that address is not normally legitimate.
5906 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
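/* A hedged caller-side sketch of the contract described above (not part of
   the original expr.c, not compiled): since TARGET and TMODE are only
   suggestions, the caller must always use the rtx actually returned.
   `exp', `target' and `mode' are assumed to be in scope.  */
#if 0
  temp = expand_expr (exp, target, mode, EXPAND_NORMAL);
  /* The value may have been computed somewhere other than TARGET.  */
  if (temp != target)
    emit_move_insn (target, temp);
#endif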
5908 rtx
5909 expand_expr (exp, target, tmode, modifier)
5910 register tree exp;
5911 rtx target;
5912 enum machine_mode tmode;
5913 enum expand_modifier modifier;
5915 register rtx op0, op1, temp;
5916 tree type = TREE_TYPE (exp);
5917 int unsignedp = TREE_UNSIGNED (type);
5918 register enum machine_mode mode;
5919 register enum tree_code code = TREE_CODE (exp);
5920 optab this_optab;
5921 rtx subtarget, original_target;
5922 int ignore;
5923 tree context;
5924 /* Used by check-memory-usage to make modifier read only. */
5925 enum expand_modifier ro_modifier;
5927 /* Handle ERROR_MARK before anybody tries to access its type. */
5928 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5930 op0 = CONST0_RTX (tmode);
5931 if (op0 != 0)
5932 return op0;
5933 return const0_rtx;
5936 mode = TYPE_MODE (type);
5937 /* Use subtarget as the target for operand 0 of a binary operation. */
5938 subtarget = get_subtarget (target);
5939 original_target = target;
5940 ignore = (target == const0_rtx
5941 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5942 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5943 || code == COND_EXPR)
5944 && TREE_CODE (type) == VOID_TYPE));
5946 /* Make a read-only version of the modifier. */
5947 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5948 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5949 ro_modifier = modifier;
5950 else
5951 ro_modifier = EXPAND_NORMAL;
5953 /* If we are going to ignore this result, we need only do something
5954 if there is a side-effect somewhere in the expression. If there
5955 is, short-circuit the most common cases here. Note that we must
5956 not call expand_expr with anything but const0_rtx in case this
5957 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5959 if (ignore)
5961 if (! TREE_SIDE_EFFECTS (exp))
5962 return const0_rtx;
5964 /* Ensure we reference a volatile object even if value is ignored, but
5965 don't do this if all we are doing is taking its address. */
5966 if (TREE_THIS_VOLATILE (exp)
5967 && TREE_CODE (exp) != FUNCTION_DECL
5968 && mode != VOIDmode && mode != BLKmode
5969 && modifier != EXPAND_CONST_ADDRESS)
5971 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5972 if (GET_CODE (temp) == MEM)
5973 temp = copy_to_reg (temp);
5974 return const0_rtx;
5977 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5978 || code == INDIRECT_REF || code == BUFFER_REF)
5979 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5980 VOIDmode, ro_modifier);
5981 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5982 || code == ARRAY_REF)
5984 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5985 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5986 return const0_rtx;
5988 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5989 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5990 /* If the second operand has no side effects, just evaluate
5991 the first. */
5992 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5993 VOIDmode, ro_modifier);
5994 else if (code == BIT_FIELD_REF)
5996 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5997 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5998 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5999 return const0_rtx;
6002 target = 0;
6005 #ifdef MAX_INTEGER_COMPUTATION_MODE
6006 /* Only check stuff here if the mode we want is different from the mode
6007 of the expression; if it's the same, check_max_integer_computation_mode
6008 will handle it. Do we really need to check this stuff at all? */
6010 if (target
6011 && GET_MODE (target) != mode
6012 && TREE_CODE (exp) != INTEGER_CST
6013 && TREE_CODE (exp) != PARM_DECL
6014 && TREE_CODE (exp) != ARRAY_REF
6015 && TREE_CODE (exp) != COMPONENT_REF
6016 && TREE_CODE (exp) != BIT_FIELD_REF
6017 && TREE_CODE (exp) != INDIRECT_REF
6018 && TREE_CODE (exp) != CALL_EXPR
6019 && TREE_CODE (exp) != VAR_DECL
6020 && TREE_CODE (exp) != RTL_EXPR)
6022 enum machine_mode mode = GET_MODE (target);
6024 if (GET_MODE_CLASS (mode) == MODE_INT
6025 && mode > MAX_INTEGER_COMPUTATION_MODE)
6026 fatal ("unsupported wide integer operation");
6029 if (tmode != mode
6030 && TREE_CODE (exp) != INTEGER_CST
6031 && TREE_CODE (exp) != PARM_DECL
6032 && TREE_CODE (exp) != ARRAY_REF
6033 && TREE_CODE (exp) != COMPONENT_REF
6034 && TREE_CODE (exp) != BIT_FIELD_REF
6035 && TREE_CODE (exp) != INDIRECT_REF
6036 && TREE_CODE (exp) != VAR_DECL
6037 && TREE_CODE (exp) != CALL_EXPR
6038 && TREE_CODE (exp) != RTL_EXPR
6039 && GET_MODE_CLASS (tmode) == MODE_INT
6040 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6041 fatal ("unsupported wide integer operation");
6043 check_max_integer_computation_mode (exp);
6044 #endif
6046 /* If will do cse, generate all results into pseudo registers
6047 since 1) that allows cse to find more things
6048 and 2) otherwise cse could produce an insn the machine
6049 cannot support. */
6051 if (! cse_not_expected && mode != BLKmode && target
6052 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6053 target = subtarget;
6055 switch (code)
6057 case LABEL_DECL:
6059 tree function = decl_function_context (exp);
6060 /* Handle using a label in a containing function. */
6061 if (function != current_function_decl
6062 && function != inline_function_decl && function != 0)
6064 struct function *p = find_function_data (function);
6065 p->expr->x_forced_labels
6066 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6067 p->expr->x_forced_labels);
6069 else
6071 if (modifier == EXPAND_INITIALIZER)
6072 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6073 label_rtx (exp),
6074 forced_labels);
6077 temp = gen_rtx_MEM (FUNCTION_MODE,
6078 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6079 if (function != current_function_decl
6080 && function != inline_function_decl && function != 0)
6081 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6082 return temp;
6085 case PARM_DECL:
6086 if (DECL_RTL (exp) == 0)
6088 error_with_decl (exp, "prior parameter's size depends on `%s'");
6089 return CONST0_RTX (mode);
6092 /* ... fall through ... */
6094 case VAR_DECL:
6095 /* If a static var's type was incomplete when the decl was written,
6096 but the type is complete now, lay out the decl now. */
6097 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6098 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6100 layout_decl (exp, 0);
6101 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6104 /* Although static-storage variables start off initialized, according to
6105 ANSI C, a memcpy could overwrite them with uninitialized values. So
6106 we check them too. This also lets us check for read-only variables
6107 accessed via a non-const declaration, in case it won't be detected
6108 any other way (e.g., in an embedded system or OS kernel without
6109 memory protection).
6111 Aggregates are not checked here; they're handled elsewhere. */
6112 if (cfun && current_function_check_memory_usage
6113 && code == VAR_DECL
6114 && GET_CODE (DECL_RTL (exp)) == MEM
6115 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6117 enum memory_use_mode memory_usage;
6118 memory_usage = get_memory_usage_from_modifier (modifier);
6120 in_check_memory_usage = 1;
6121 if (memory_usage != MEMORY_USE_DONT)
6122 emit_library_call (chkr_check_addr_libfunc,
6123 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6124 XEXP (DECL_RTL (exp), 0), Pmode,
6125 GEN_INT (int_size_in_bytes (type)),
6126 TYPE_MODE (sizetype),
6127 GEN_INT (memory_usage),
6128 TYPE_MODE (integer_type_node));
6129 in_check_memory_usage = 0;
6132 /* ... fall through ... */
6134 case FUNCTION_DECL:
6135 case RESULT_DECL:
6136 if (DECL_RTL (exp) == 0)
6137 abort ();
6139 /* Ensure variable marked as used even if it doesn't go through
6140 a parser. If it hasn't been used yet, write out an external
6141 definition. */
6142 if (! TREE_USED (exp))
6144 assemble_external (exp);
6145 TREE_USED (exp) = 1;
6148 /* Show we haven't gotten RTL for this yet. */
6149 temp = 0;
6151 /* Handle variables inherited from containing functions. */
6152 context = decl_function_context (exp);
6154 /* We treat inline_function_decl as an alias for the current function
6155 because that is the inline function whose vars, types, etc.
6156 are being merged into the current function.
6157 See expand_inline_function. */
6159 if (context != 0 && context != current_function_decl
6160 && context != inline_function_decl
6161 /* If var is static, we don't need a static chain to access it. */
6162 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6163 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6165 rtx addr;
6167 /* Mark as non-local and addressable. */
6168 DECL_NONLOCAL (exp) = 1;
6169 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6170 abort ();
6171 mark_addressable (exp);
6172 if (GET_CODE (DECL_RTL (exp)) != MEM)
6173 abort ();
6174 addr = XEXP (DECL_RTL (exp), 0);
6175 if (GET_CODE (addr) == MEM)
6176 addr = change_address (addr, Pmode,
6177 fix_lexical_addr (XEXP (addr, 0), exp));
6178 else
6179 addr = fix_lexical_addr (addr, exp);
6181 temp = change_address (DECL_RTL (exp), mode, addr);
6184 /* This is the case of an array whose size is to be determined
6185 from its initializer, while the initializer is still being parsed.
6186 See expand_decl. */
6188 else if (GET_CODE (DECL_RTL (exp)) == MEM
6189 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6190 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6191 XEXP (DECL_RTL (exp), 0));
6193 /* If DECL_RTL is memory, we are in the normal case; if either
6194 the address is not valid, or it is not a register and -fforce-addr
6195 is specified, get the address into a register. */
6197 else if (GET_CODE (DECL_RTL (exp)) == MEM
6198 && modifier != EXPAND_CONST_ADDRESS
6199 && modifier != EXPAND_SUM
6200 && modifier != EXPAND_INITIALIZER
6201 && (! memory_address_p (DECL_MODE (exp),
6202 XEXP (DECL_RTL (exp), 0))
6203 || (flag_force_addr
6204 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6205 temp = change_address (DECL_RTL (exp), VOIDmode,
6206 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6208 /* If we got something, return it. But first, set the alignment
6209 if the address is a register. */
6210 if (temp != 0)
6212 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6213 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6215 return temp;
6218 /* If the mode of DECL_RTL does not match that of the decl, it
6219 must be a promoted value. We return a SUBREG of the wanted mode,
6220 but mark it so that we know that it was already extended. */
6222 if (GET_CODE (DECL_RTL (exp)) == REG
6223 && GET_MODE (DECL_RTL (exp)) != mode)
6225 /* Get the signedness used for this variable. Ensure we get the
6226 same mode we got when the variable was declared. */
6227 if (GET_MODE (DECL_RTL (exp))
6228 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6229 abort ();
6231 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6232 SUBREG_PROMOTED_VAR_P (temp) = 1;
6233 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6234 return temp;
6237 return DECL_RTL (exp);
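/* A hedged, worked illustration of the promoted-value case above (not part
   of the original expr.c): on a target that promotes QImode variables to
   SImode registers, a `char' decl can have DECL_RTL (reg:SI 60) while MODE
   is QImode.  The code then returns (subreg:QI (reg:SI 60) 0) with
   SUBREG_PROMOTED_VAR_P set, so later users know the value has already been
   extended.  The register number 60 is invented.  */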
6239 case INTEGER_CST:
6240 return immed_double_const (TREE_INT_CST_LOW (exp),
6241 TREE_INT_CST_HIGH (exp), mode);
6243 case CONST_DECL:
6244 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6245 EXPAND_MEMORY_USE_BAD);
6247 case REAL_CST:
6248 /* If optimized, generate immediate CONST_DOUBLE
6249 which will be turned into memory by reload if necessary.
6251 We used to force a register so that loop.c could see it. But
6252 this does not allow gen_* patterns to perform optimizations with
6253 the constants. It also produces two insns in cases like "x = 1.0;".
6254 On most machines, floating-point constants are not permitted in
6255 many insns, so we'd end up copying it to a register in any case.
6257 Now, we do the copying in expand_binop, if appropriate. */
6258 return immed_real_const (exp);
6260 case COMPLEX_CST:
6261 case STRING_CST:
6262 if (! TREE_CST_RTL (exp))
6263 output_constant_def (exp, 1);
6265 /* TREE_CST_RTL probably contains a constant address.
6266 On RISC machines where a constant address isn't valid,
6267 make some insns to get that address into a register. */
6268 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6269 && modifier != EXPAND_CONST_ADDRESS
6270 && modifier != EXPAND_INITIALIZER
6271 && modifier != EXPAND_SUM
6272 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6273 || (flag_force_addr
6274 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6275 return change_address (TREE_CST_RTL (exp), VOIDmode,
6276 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6277 return TREE_CST_RTL (exp);
6279 case EXPR_WITH_FILE_LOCATION:
6281 rtx to_return;
6282 const char *saved_input_filename = input_filename;
6283 int saved_lineno = lineno;
6284 input_filename = EXPR_WFL_FILENAME (exp);
6285 lineno = EXPR_WFL_LINENO (exp);
6286 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6287 emit_line_note (input_filename, lineno);
6288 /* Possibly avoid switching back and forth here. */
6289 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6290 input_filename = saved_input_filename;
6291 lineno = saved_lineno;
6292 return to_return;
6295 case SAVE_EXPR:
6296 context = decl_function_context (exp);
6298 /* If this SAVE_EXPR was at global context, assume we are an
6299 initialization function and move it into our context. */
6300 if (context == 0)
6301 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6303 /* We treat inline_function_decl as an alias for the current function
6304 because that is the inline function whose vars, types, etc.
6305 are being merged into the current function.
6306 See expand_inline_function. */
6307 if (context == current_function_decl || context == inline_function_decl)
6308 context = 0;
6310 /* If this is non-local, handle it. */
6311 if (context)
6313 /* The following call just exists to abort if the context is
6314 not of a containing function. */
6315 find_function_data (context);
6317 temp = SAVE_EXPR_RTL (exp);
6318 if (temp && GET_CODE (temp) == REG)
6320 put_var_into_stack (exp);
6321 temp = SAVE_EXPR_RTL (exp);
6323 if (temp == 0 || GET_CODE (temp) != MEM)
6324 abort ();
6325 return change_address (temp, mode,
6326 fix_lexical_addr (XEXP (temp, 0), exp));
6328 if (SAVE_EXPR_RTL (exp) == 0)
6330 if (mode == VOIDmode)
6331 temp = const0_rtx;
6332 else
6333 temp = assign_temp (build_qualified_type (type,
6334 (TYPE_QUALS (type)
6335 | TYPE_QUAL_CONST)),
6336 3, 0, 0);
6338 SAVE_EXPR_RTL (exp) = temp;
6339 if (!optimize && GET_CODE (temp) == REG)
6340 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6341 save_expr_regs);
6343 /* If the mode of TEMP does not match that of the expression, it
6344 must be a promoted value. We pass store_expr a SUBREG of the
6345 wanted mode but mark it so that we know that it was already
6346 extended. Note that `unsignedp' was modified above in
6347 this case. */
6349 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6351 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6352 SUBREG_PROMOTED_VAR_P (temp) = 1;
6353 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6356 if (temp == const0_rtx)
6357 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6358 EXPAND_MEMORY_USE_BAD);
6359 else
6360 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6362 TREE_USED (exp) = 1;
6365 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6366 must be a promoted value. We return a SUBREG of the wanted mode,
6367 but mark it so that we know that it was already extended. */
6369 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6370 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6372 /* Compute the signedness and make the proper SUBREG. */
6373 promote_mode (type, mode, &unsignedp, 0);
6374 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6375 SUBREG_PROMOTED_VAR_P (temp) = 1;
6376 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6377 return temp;
6380 return SAVE_EXPR_RTL (exp);
6382 case UNSAVE_EXPR:
6384 rtx temp;
6385 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6386 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6387 return temp;
6390 case PLACEHOLDER_EXPR:
6392 tree placeholder_expr;
6394 /* If there is an object on the head of the placeholder list,
6395 see if some object in it is of type TYPE or a pointer to it. For
6396 further information, see tree.def. */
6397 for (placeholder_expr = placeholder_list;
6398 placeholder_expr != 0;
6399 placeholder_expr = TREE_CHAIN (placeholder_expr))
6401 tree need_type = TYPE_MAIN_VARIANT (type);
6402 tree object = 0;
6403 tree old_list = placeholder_list;
6404 tree elt;
6406 /* Find the outermost reference that is of the type we want.
6407 If none, see if any object has a type that is a pointer to
6408 the type we want. */
6409 for (elt = TREE_PURPOSE (placeholder_expr);
6410 elt != 0 && object == 0;
6411 elt
6412 = ((TREE_CODE (elt) == COMPOUND_EXPR
6413 || TREE_CODE (elt) == COND_EXPR)
6414 ? TREE_OPERAND (elt, 1)
6415 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6416 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6417 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6418 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6419 ? TREE_OPERAND (elt, 0) : 0))
6420 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6421 object = elt;
6423 for (elt = TREE_PURPOSE (placeholder_expr);
6424 elt != 0 && object == 0;
6425 elt
6426 = ((TREE_CODE (elt) == COMPOUND_EXPR
6427 || TREE_CODE (elt) == COND_EXPR)
6428 ? TREE_OPERAND (elt, 1)
6429 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6430 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6431 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6432 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6433 ? TREE_OPERAND (elt, 0) : 0))
6434 if (POINTER_TYPE_P (TREE_TYPE (elt))
6435 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6436 == need_type))
6437 object = build1 (INDIRECT_REF, need_type, elt);
6439 if (object != 0)
6441 /* Expand this object skipping the list entries before
6442 it was found in case it is also a PLACEHOLDER_EXPR.
6443 In that case, we want to translate it using subsequent
6444 entries. */
6445 placeholder_list = TREE_CHAIN (placeholder_expr);
6446 temp = expand_expr (object, original_target, tmode,
6447 ro_modifier);
6448 placeholder_list = old_list;
6449 return temp;
6454 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6455 abort ();
6457 case WITH_RECORD_EXPR:
6458 /* Put the object on the placeholder list, expand our first operand,
6459 and pop the list. */
6460 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6461 placeholder_list);
6462 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6463 tmode, ro_modifier);
6464 placeholder_list = TREE_CHAIN (placeholder_list);
6465 return target;
6467 case GOTO_EXPR:
6468 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6469 expand_goto (TREE_OPERAND (exp, 0));
6470 else
6471 expand_computed_goto (TREE_OPERAND (exp, 0));
6472 return const0_rtx;
6474 case EXIT_EXPR:
6475 expand_exit_loop_if_false (NULL_PTR,
6476 invert_truthvalue (TREE_OPERAND (exp, 0)));
6477 return const0_rtx;
6479 case LABELED_BLOCK_EXPR:
6480 if (LABELED_BLOCK_BODY (exp))
6481 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6482 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6483 return const0_rtx;
6485 case EXIT_BLOCK_EXPR:
6486 if (EXIT_BLOCK_RETURN (exp))
6487 sorry ("returned value in block_exit_expr");
6488 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6489 return const0_rtx;
6491 case LOOP_EXPR:
6492 push_temp_slots ();
6493 expand_start_loop (1);
6494 expand_expr_stmt (TREE_OPERAND (exp, 0));
6495 expand_end_loop ();
6496 pop_temp_slots ();
6498 return const0_rtx;
6500 case BIND_EXPR:
6502 tree vars = TREE_OPERAND (exp, 0);
6503 int vars_need_expansion = 0;
6505 /* Need to open a binding contour here because
6506 if there are any cleanups they must be contained here. */
6507 expand_start_bindings (2);
6509 /* Mark the corresponding BLOCK for output in its proper place. */
6510 if (TREE_OPERAND (exp, 2) != 0
6511 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6512 insert_block (TREE_OPERAND (exp, 2));
6514 /* If VARS have not yet been expanded, expand them now. */
6515 while (vars)
6517 if (DECL_RTL (vars) == 0)
6519 vars_need_expansion = 1;
6520 expand_decl (vars);
6522 expand_decl_init (vars);
6523 vars = TREE_CHAIN (vars);
6526 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6528 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6530 return temp;
6533 case RTL_EXPR:
6534 if (RTL_EXPR_SEQUENCE (exp))
6536 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6537 abort ();
6538 emit_insns (RTL_EXPR_SEQUENCE (exp));
6539 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6541 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6542 free_temps_for_rtl_expr (exp);
6543 return RTL_EXPR_RTL (exp);
6545 case CONSTRUCTOR:
6546 /* If we don't need the result, just ensure we evaluate any
6547 subexpressions. */
6548 if (ignore)
6550 tree elt;
6551 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6552 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6553 EXPAND_MEMORY_USE_BAD);
6554 return const0_rtx;
6557 /* All elts simple constants => refer to a constant in memory. But
6558 if this is a non-BLKmode mode, let it store a field at a time
6559 since that should make a CONST_INT or CONST_DOUBLE when we
6560 fold. Likewise, if we have a target we can use, it is best to
6561 store directly into the target unless the type is large enough
6562 that memcpy will be used. If we are making an initializer and
6563 all operands are constant, put it in memory as well. */
6564 else if ((TREE_STATIC (exp)
6565 && ((mode == BLKmode
6566 && ! (target != 0 && safe_from_p (target, exp, 1)))
6567 || TREE_ADDRESSABLE (exp)
6568 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6569 && (! MOVE_BY_PIECES_P
6570 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6571 TYPE_ALIGN (type)))
6572 && ! mostly_zeros_p (exp))))
6573 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6575 rtx constructor = output_constant_def (exp, 1);
6577 if (modifier != EXPAND_CONST_ADDRESS
6578 && modifier != EXPAND_INITIALIZER
6579 && modifier != EXPAND_SUM
6580 && (! memory_address_p (GET_MODE (constructor),
6581 XEXP (constructor, 0))
6582 || (flag_force_addr
6583 && GET_CODE (XEXP (constructor, 0)) != REG)))
6584 constructor = change_address (constructor, VOIDmode,
6585 XEXP (constructor, 0));
6586 return constructor;
6588 else
6590 /* Handle calls that pass values in multiple non-contiguous
6591 locations. The Irix 6 ABI has examples of this. */
6592 if (target == 0 || ! safe_from_p (target, exp, 1)
6593 || GET_CODE (target) == PARALLEL)
6594 target
6595 = assign_temp (build_qualified_type (type,
6596 (TYPE_QUALS (type)
6597 | (TREE_READONLY (exp)
6598 * TYPE_QUAL_CONST))),
6599 TREE_ADDRESSABLE (exp), 1, 1);
6601 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6602 int_size_in_bytes (TREE_TYPE (exp)));
6603 return target;
6606 case INDIRECT_REF:
6608 tree exp1 = TREE_OPERAND (exp, 0);
6609 tree index;
6610 tree string = string_constant (exp1, &index);
6612 /* Try to optimize reads from const strings. */
6613 if (string
6614 && TREE_CODE (string) == STRING_CST
6615 && TREE_CODE (index) == INTEGER_CST
6616 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6617 && GET_MODE_CLASS (mode) == MODE_INT
6618 && GET_MODE_SIZE (mode) == 1
6619 && modifier != EXPAND_MEMORY_USE_WO)
6620 return
6621 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6623 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6624 op0 = memory_address (mode, op0);
6626 if (cfun && current_function_check_memory_usage
6627 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6629 enum memory_use_mode memory_usage;
6630 memory_usage = get_memory_usage_from_modifier (modifier);
6632 if (memory_usage != MEMORY_USE_DONT)
6634 in_check_memory_usage = 1;
6635 emit_library_call (chkr_check_addr_libfunc,
6636 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6637 Pmode, GEN_INT (int_size_in_bytes (type)),
6638 TYPE_MODE (sizetype),
6639 GEN_INT (memory_usage),
6640 TYPE_MODE (integer_type_node));
6641 in_check_memory_usage = 0;
6645 temp = gen_rtx_MEM (mode, op0);
6646 set_mem_attributes (temp, exp, 0);
6648 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6649 here, because, in C and C++, the fact that a location is accessed
6650 through a pointer to const does not mean that the value there can
6651 never change. Languages where it can never change should
6652 also set TREE_STATIC. */
6653 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6655 /* If we are writing to this object and its type is a record with
6656 readonly fields, we must mark it as readonly so it will
6657 conflict with readonly references to those fields. */
6658 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6659 RTX_UNCHANGING_P (temp) = 1;
6661 return temp;
6664 case ARRAY_REF:
6665 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6666 abort ();
6669 tree array = TREE_OPERAND (exp, 0);
6670 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6671 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6672 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6673 HOST_WIDE_INT i;
6675 /* Optimize the special case of a zero lower bound.
6677 We convert the low_bound to sizetype to avoid some problems
6678 with constant folding. (E.g. suppose the lower bound is 1,
6679 and its mode is QI. Without the conversion, (ARRAY
6680 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6681 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6683 if (! integer_zerop (low_bound))
6684 index = size_diffop (index, convert (sizetype, low_bound));
6686 /* Fold an expression like: "foo"[2].
6687 This is not done in fold so it won't happen inside &.
6688 Don't fold if this is for wide characters since it's too
6689 difficult to do correctly and this is a very rare case. */
6691 if (TREE_CODE (array) == STRING_CST
6692 && TREE_CODE (index) == INTEGER_CST
6693 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6694 && GET_MODE_CLASS (mode) == MODE_INT
6695 && GET_MODE_SIZE (mode) == 1)
6696 return
6697 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
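/* A hedged, worked instance of the fold above (not part of the original
   expr.c): for "foo"[2] the string element 'o' is returned directly as
   GEN_INT ('o'), i.e. (const_int 111), and no memory reference is emitted
   at all.  */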
6699 /* If this is a constant index into a constant array,
6700 just get the value from the array. Handle both the cases when
6701 we have an explicit constructor and when our operand is a variable
6702 that was declared const. */
6704 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6705 && TREE_CODE (index) == INTEGER_CST
6706 && 0 > compare_tree_int (index,
6707 list_length (CONSTRUCTOR_ELTS
6708 (TREE_OPERAND (exp, 0)))))
6710 tree elem;
6712 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6713 i = TREE_INT_CST_LOW (index);
6714 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6717 if (elem)
6718 return expand_expr (fold (TREE_VALUE (elem)), target,
6719 tmode, ro_modifier);
6722 else if (optimize >= 1
6723 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6724 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6725 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6727 if (TREE_CODE (index) == INTEGER_CST)
6729 tree init = DECL_INITIAL (array);
6731 if (TREE_CODE (init) == CONSTRUCTOR)
6733 tree elem;
6735 for (elem = CONSTRUCTOR_ELTS (init);
6736 (elem
6737 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6738 elem = TREE_CHAIN (elem))
6741 if (elem)
6742 return expand_expr (fold (TREE_VALUE (elem)), target,
6743 tmode, ro_modifier);
6745 else if (TREE_CODE (init) == STRING_CST
6746 && 0 > compare_tree_int (index,
6747 TREE_STRING_LENGTH (init)))
6749 tree type = TREE_TYPE (TREE_TYPE (init));
6750 enum machine_mode mode = TYPE_MODE (type);
6752 if (GET_MODE_CLASS (mode) == MODE_INT
6753 && GET_MODE_SIZE (mode) == 1)
6754 return (GEN_INT
6755 (TREE_STRING_POINTER
6756 (init)[TREE_INT_CST_LOW (index)]));
6761 /* Fall through. */
6763 case COMPONENT_REF:
6764 case BIT_FIELD_REF:
6765 /* If the operand is a CONSTRUCTOR, we can just extract the
6766 appropriate field if it is present. Don't do this if we have
6767 already written the data since we want to refer to that copy
6768 and varasm.c assumes that's what we'll do. */
6769 if (code != ARRAY_REF
6770 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6771 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6773 tree elt;
6775 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6776 elt = TREE_CHAIN (elt))
6777 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6778 /* We can normally use the value of the field in the
6779 CONSTRUCTOR. However, if this is a bitfield in
6780 an integral mode that we can fit in a HOST_WIDE_INT,
6781 we must mask only the number of bits in the bitfield,
6782 since this is done implicitly by the constructor. If
6783 the bitfield does not meet either of those conditions,
6784 we can't do this optimization. */
6785 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6786 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6787 == MODE_INT)
6788 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6789 <= HOST_BITS_PER_WIDE_INT))))
6791 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6792 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6794 HOST_WIDE_INT bitsize
6795 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6797 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6799 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6800 op0 = expand_and (op0, op1, target);
6802 else
6804 enum machine_mode imode
6805 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6806 tree count
6807 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6808 0);
6810 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6811 target, 0);
6812 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6813 target, 0);
6817 return op0;
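/* A hedged, worked illustration of the bitfield adjustment above (not part
   of the original expr.c): for a hypothetical 3-bit bitfield whose mode is
   SImode (32 bits), an unsigned field is masked with (1 << 3) - 1 = 7,
   while a signed field is shifted left and then right by 32 - 3 = 29 bits,
   which drops the upper bits and sign-extends the remaining three.  */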
6822 enum machine_mode mode1;
6823 HOST_WIDE_INT bitsize, bitpos;
6824 tree offset;
6825 int volatilep = 0;
6826 unsigned int alignment;
6827 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6828 &mode1, &unsignedp, &volatilep,
6829 &alignment);
6831 /* If we got back the original object, something is wrong. Perhaps
6832 we are evaluating an expression too early. In any event, don't
6833 infinitely recurse. */
6834 if (tem == exp)
6835 abort ();
6837 /* If TEM's type is a union of variable size, pass TARGET to the inner
6838 computation, since it will need a temporary and TARGET is known
6839 to be suitable. This occurs in unchecked conversion in Ada. */
6841 op0 = expand_expr (tem,
6842 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6843 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6844 != INTEGER_CST)
6845 ? target : NULL_RTX),
6846 VOIDmode,
6847 (modifier == EXPAND_INITIALIZER
6848 || modifier == EXPAND_CONST_ADDRESS)
6849 ? modifier : EXPAND_NORMAL);
6851 /* If this is a constant, put it into a register if it is a
6852 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6853 if (CONSTANT_P (op0))
6855 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6856 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6857 && offset == 0)
6858 op0 = force_reg (mode, op0);
6859 else
6860 op0 = validize_mem (force_const_mem (mode, op0));
6863 if (offset != 0)
6865 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6867 /* If this object is in memory, put it into a register.
6868 This case can't occur in C, but can in Ada if we have
6869 unchecked conversion of an expression from a scalar type to
6870 an array or record type. */
6871 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6872 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6874 tree nt = build_qualified_type (TREE_TYPE (tem),
6875 (TYPE_QUALS (TREE_TYPE (tem))
6876 | TYPE_QUAL_CONST));
6877 rtx memloc = assign_temp (nt, 1, 1, 1);
6879 mark_temp_addr_taken (memloc);
6880 emit_move_insn (memloc, op0);
6881 op0 = memloc;
6884 if (GET_CODE (op0) != MEM)
6885 abort ();
6887 if (GET_MODE (offset_rtx) != ptr_mode)
6889 #ifdef POINTERS_EXTEND_UNSIGNED
6890 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6891 #else
6892 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6893 #endif
6896 /* A constant address in OP0 can have VOIDmode; we must not
6897 call force_reg in that case, so avoid it. */
6898 if (GET_CODE (op0) == MEM
6899 && GET_MODE (op0) == BLKmode
6900 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6901 && bitsize != 0
6902 && (bitpos % bitsize) == 0
6903 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6904 && alignment == GET_MODE_ALIGNMENT (mode1))
6906 rtx temp = change_address (op0, mode1,
6907 plus_constant (XEXP (op0, 0),
6908 (bitpos /
6909 BITS_PER_UNIT)));
6910 if (GET_CODE (XEXP (temp, 0)) == REG)
6911 op0 = temp;
6912 else
6913 op0 = change_address (op0, mode1,
6914 force_reg (GET_MODE (XEXP (temp, 0)),
6915 XEXP (temp, 0)));
6916 bitpos = 0;
6919 op0 = change_address (op0, VOIDmode,
6920 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6921 force_reg (ptr_mode,
6922 offset_rtx)));
6925 /* Don't forget about volatility even if this is a bitfield. */
6926 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6928 op0 = copy_rtx (op0);
6929 MEM_VOLATILE_P (op0) = 1;
6932 /* Check the access. */
6933 if (cfun != 0 && current_function_check_memory_usage
6934 && GET_CODE (op0) == MEM)
6936 enum memory_use_mode memory_usage;
6937 memory_usage = get_memory_usage_from_modifier (modifier);
6939 if (memory_usage != MEMORY_USE_DONT)
6941 rtx to;
6942 int size;
6944 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6945 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6947 /* Check the access right of the pointer. */
6948 in_check_memory_usage = 1;
6949 if (size > BITS_PER_UNIT)
6950 emit_library_call (chkr_check_addr_libfunc,
6951 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6952 Pmode, GEN_INT (size / BITS_PER_UNIT),
6953 TYPE_MODE (sizetype),
6954 GEN_INT (memory_usage),
6955 TYPE_MODE (integer_type_node));
6956 in_check_memory_usage = 0;
6960 /* In cases where an aligned union has an unaligned object
6961 as a field, we might be extracting a BLKmode value from
6962 an integer-mode (e.g., SImode) object. Handle this case
6963 by doing the extract into an object as wide as the field
6964 (which we know to be the width of a basic mode), then
6965 storing into memory, and changing the mode to BLKmode.
6966 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6967 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6968 if (mode1 == VOIDmode
6969 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6970 || (modifier != EXPAND_CONST_ADDRESS
6971 && modifier != EXPAND_INITIALIZER
6972 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6973 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6974 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6975 /* If the field isn't aligned enough to fetch as a memref,
6976 fetch it as a bit field. */
6977 || (mode1 != BLKmode
6978 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6979 && ((TYPE_ALIGN (TREE_TYPE (tem))
6980 < GET_MODE_ALIGNMENT (mode))
6981 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6982 /* If the type and the field are a constant size and the
6983 size of the type isn't the same size as the bitfield,
6984 we must use bitfield operations. */
6985 || ((bitsize >= 0
6986 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6987 == INTEGER_CST)
6988 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6989 bitsize)))))
6990 || (modifier != EXPAND_CONST_ADDRESS
6991 && modifier != EXPAND_INITIALIZER
6992 && mode == BLKmode
6993 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6994 && (TYPE_ALIGN (type) > alignment
6995 || bitpos % TYPE_ALIGN (type) != 0)))
6997 enum machine_mode ext_mode = mode;
6999 if (ext_mode == BLKmode
7000 && ! (target != 0 && GET_CODE (op0) == MEM
7001 && GET_CODE (target) == MEM
7002 && bitpos % BITS_PER_UNIT == 0))
7003 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7005 if (ext_mode == BLKmode)
7007 /* In this case, BITPOS must start at a byte boundary and
7008 TARGET, if specified, must be a MEM. */
7009 if (GET_CODE (op0) != MEM
7010 || (target != 0 && GET_CODE (target) != MEM)
7011 || bitpos % BITS_PER_UNIT != 0)
7012 abort ();
7014 op0 = change_address (op0, VOIDmode,
7015 plus_constant (XEXP (op0, 0),
7016 bitpos / BITS_PER_UNIT));
7017 if (target == 0)
7018 target = assign_temp (type, 0, 1, 1);
7020 emit_block_move (target, op0,
7021 bitsize == -1 ? expr_size (exp)
7022 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7023 / BITS_PER_UNIT),
7024 BITS_PER_UNIT);
7026 return target;
7029 op0 = validize_mem (op0);
7031 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7032 mark_reg_pointer (XEXP (op0, 0), alignment);
7034 op0 = extract_bit_field (op0, bitsize, bitpos,
7035 unsignedp, target, ext_mode, ext_mode,
7036 alignment,
7037 int_size_in_bytes (TREE_TYPE (tem)));
7039 /* If the result is a record type and BITSIZE is narrower than
7040 the mode of OP0, an integral mode, and this is a big endian
7041 machine, we must put the field into the high-order bits. */
7042 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7043 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7044 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7045 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7046 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7047 - bitsize),
7048 op0, 1);
7050 if (mode == BLKmode)
7052 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7053 TYPE_QUAL_CONST);
7054 rtx new = assign_temp (nt, 0, 1, 1);
7056 emit_move_insn (new, op0);
7057 op0 = copy_rtx (new);
7058 PUT_MODE (op0, BLKmode);
7061 return op0;
7064 /* If the result is BLKmode, use that to access the object
7065 now as well. */
7066 if (mode == BLKmode)
7067 mode1 = BLKmode;
7069 /* Get a reference to just this component. */
7070 if (modifier == EXPAND_CONST_ADDRESS
7071 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7073 rtx new = gen_rtx_MEM (mode1,
7074 plus_constant (XEXP (op0, 0),
7075 (bitpos / BITS_PER_UNIT)));
7077 MEM_COPY_ATTRIBUTES (new, op0);
7078 op0 = new;
7080 else
7081 op0 = change_address (op0, mode1,
7082 plus_constant (XEXP (op0, 0),
7083 (bitpos / BITS_PER_UNIT)));
7085 set_mem_attributes (op0, exp, 0);
7086 if (GET_CODE (XEXP (op0, 0)) == REG)
7087 mark_reg_pointer (XEXP (op0, 0), alignment);
7089 MEM_VOLATILE_P (op0) |= volatilep;
7090 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7091 || modifier == EXPAND_CONST_ADDRESS
7092 || modifier == EXPAND_INITIALIZER)
7093 return op0;
7094 else if (target == 0)
7095 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7097 convert_move (target, op0, unsignedp);
7098 return target;
7101 /* Intended for a reference to a buffer of a file-object in Pascal.
7102 But it's not certain that a special tree code will really be
7103 necessary for these. INDIRECT_REF might work for them. */
7104 case BUFFER_REF:
7105 abort ();
7107 case IN_EXPR:
7109 /* Pascal set IN expression.
7111 Algorithm:
7112 rlo = set_low - (set_low%bits_per_word);
7113 the_word = set [ (index - rlo)/bits_per_word ];
7114 bit_index = index % bits_per_word;
7115 bitmask = 1 << bit_index;
7116 return !!(the_word & bitmask); */
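/* A hedged, worked instance of the algorithm above (not part of the
   original expr.c), assuming bits_per_word == 8, set_low == 3 and
   index == 11: rlo = 3 - (3 % 8) = 0, the_word = set[(11 - 0) / 8] =
   set[1], bit_index = 11 % 8 = 3, bitmask = 1 << 3 = 8, so the result is
   !!(set[1] & 8).  All of the numbers are invented for illustration.  */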
7118 tree set = TREE_OPERAND (exp, 0);
7119 tree index = TREE_OPERAND (exp, 1);
7120 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7121 tree set_type = TREE_TYPE (set);
7122 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7123 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7124 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7125 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7126 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7127 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7128 rtx setaddr = XEXP (setval, 0);
7129 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7130 rtx rlow;
7131 rtx diff, quo, rem, addr, bit, result;
7133 /* If domain is empty, answer is no. Likewise if index is constant
7134 and out of bounds. */
7135 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7136 && TREE_CODE (set_low_bound) == INTEGER_CST
7137 && tree_int_cst_lt (set_high_bound, set_low_bound))
7138 || (TREE_CODE (index) == INTEGER_CST
7139 && TREE_CODE (set_low_bound) == INTEGER_CST
7140 && tree_int_cst_lt (index, set_low_bound))
7141 || (TREE_CODE (set_high_bound) == INTEGER_CST
7142 && TREE_CODE (index) == INTEGER_CST
7143 && tree_int_cst_lt (set_high_bound, index))))
7144 return const0_rtx;
7146 if (target == 0)
7147 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7149 /* If we get here, we have to generate the code for both cases
7150 (in range and out of range). */
7152 op0 = gen_label_rtx ();
7153 op1 = gen_label_rtx ();
7155 if (! (GET_CODE (index_val) == CONST_INT
7156 && GET_CODE (lo_r) == CONST_INT))
7158 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7159 GET_MODE (index_val), iunsignedp, 0, op1);
7162 if (! (GET_CODE (index_val) == CONST_INT
7163 && GET_CODE (hi_r) == CONST_INT))
7165 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7166 GET_MODE (index_val), iunsignedp, 0, op1);
7169 /* Calculate the element number of bit zero in the first word
7170 of the set. */
7171 if (GET_CODE (lo_r) == CONST_INT)
7172 rlow = GEN_INT (INTVAL (lo_r)
7173 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7174 else
7175 rlow = expand_binop (index_mode, and_optab, lo_r,
7176 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7177 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7179 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7180 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7182 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7183 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7184 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7185 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7187 addr = memory_address (byte_mode,
7188 expand_binop (index_mode, add_optab, diff,
7189 setaddr, NULL_RTX, iunsignedp,
7190 OPTAB_LIB_WIDEN));
7192 /* Extract the bit we want to examine. */
7193 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7194 gen_rtx_MEM (byte_mode, addr),
7195 make_tree (TREE_TYPE (index), rem),
7196 NULL_RTX, 1);
7197 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7198 GET_MODE (target) == byte_mode ? target : 0,
7199 1, OPTAB_LIB_WIDEN);
7201 if (result != target)
7202 convert_move (target, result, 1);
7204 /* Output the code to handle the out-of-range case. */
7205 emit_jump (op0);
7206 emit_label (op1);
7207 emit_move_insn (target, const0_rtx);
7208 emit_label (op0);
7209 return target;
7212 case WITH_CLEANUP_EXPR:
7213 if (RTL_EXPR_RTL (exp) == 0)
7215 RTL_EXPR_RTL (exp)
7216 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7217 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7219 /* That's it for this cleanup. */
7220 TREE_OPERAND (exp, 2) = 0;
7222 return RTL_EXPR_RTL (exp);
7224 case CLEANUP_POINT_EXPR:
7226 /* Start a new binding layer that will keep track of all cleanup
7227 actions to be performed. */
7228 expand_start_bindings (2);
7230 target_temp_slot_level = temp_slot_level;
7232 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7233 /* If we're going to use this value, load it up now. */
7234 if (! ignore)
7235 op0 = force_not_mem (op0);
7236 preserve_temp_slots (op0);
7237 expand_end_bindings (NULL_TREE, 0, 0);
7239 return op0;
7241 case CALL_EXPR:
7242 /* Check for a built-in function. */
7243 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7244 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7245 == FUNCTION_DECL)
7246 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7248 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7249 == BUILT_IN_FRONTEND)
7250 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7251 else
7252 return expand_builtin (exp, target, subtarget, tmode, ignore);
7255 return expand_call (exp, target, ignore);
7257 case NON_LVALUE_EXPR:
7258 case NOP_EXPR:
7259 case CONVERT_EXPR:
7260 case REFERENCE_EXPR:
7261 if (TREE_OPERAND (exp, 0) == error_mark_node)
7262 return const0_rtx;
7264 if (TREE_CODE (type) == UNION_TYPE)
7266 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7268 /* If both input and output are BLKmode, this conversion
7269 isn't actually doing anything unless we need to make the
7270 alignment stricter. */
7271 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7272 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7273 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7274 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7275 modifier);
7277 if (target == 0)
7278 target = assign_temp (type, 0, 1, 1);
7280 if (GET_CODE (target) == MEM)
7281 /* Store data into beginning of memory target. */
7282 store_expr (TREE_OPERAND (exp, 0),
7283 change_address (target, TYPE_MODE (valtype), 0), 0);
7285 else if (GET_CODE (target) == REG)
7286 /* Store this field into a union of the proper type. */
7287 store_field (target,
7288 MIN ((int_size_in_bytes (TREE_TYPE
7289 (TREE_OPERAND (exp, 0)))
7290 * BITS_PER_UNIT),
7291 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7292 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7293 VOIDmode, 0, BITS_PER_UNIT,
7294 int_size_in_bytes (type), 0);
7295 else
7296 abort ();
7298 /* Return the entire union. */
7299 return target;
7302 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7304 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7305 ro_modifier);
7307 /* If the signedness of the conversion differs and OP0 is
7308 a promoted SUBREG, clear that indication since we now
7309 have to do the proper extension. */
7310 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7311 && GET_CODE (op0) == SUBREG)
7312 SUBREG_PROMOTED_VAR_P (op0) = 0;
7314 return op0;
7317 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7318 if (GET_MODE (op0) == mode)
7319 return op0;
7321 /* If OP0 is a constant, just convert it into the proper mode. */
7322 if (CONSTANT_P (op0))
7323 return
7324 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7325 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7327 if (modifier == EXPAND_INITIALIZER)
7328 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7330 if (target == 0)
7331 return
7332 convert_to_mode (mode, op0,
7333 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7334 else
7335 convert_move (target, op0,
7336 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7337 return target;
7339 case PLUS_EXPR:
7340 /* We come here from MINUS_EXPR when the second operand is a
7341 constant. */
7342 plus_expr:
7343 this_optab = ! unsignedp && flag_trapv
7344 && (GET_MODE_CLASS(mode) == MODE_INT)
7345 ? addv_optab : add_optab;
7347 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7348 something else, make sure we add the register to the constant and
7349 then to the other thing. This case can occur during strength
7350 reduction and doing it this way will produce better code if the
7351 frame pointer or argument pointer is eliminated.
7353 fold-const.c will ensure that the constant is always in the inner
7354 PLUS_EXPR, so the only case we need to do anything about is if
7355 sp, ap, or fp is our second argument, in which case we must swap
7356 the innermost first argument and our second argument. */
7358 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7359 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7360 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7361 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7362 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7363 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7365 tree t = TREE_OPERAND (exp, 1);
7367 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7368 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7371 /* If the result is to be ptr_mode and we are adding an integer to
7372 something, we might be forming a constant. So try to use
7373 plus_constant. If it produces a sum and we can't accept it,
7374 use force_operand. This allows P = &ARR[const] to generate
7375 efficient code on machines where a SYMBOL_REF is not a valid
7376 address.
7378 If this is an EXPAND_SUM call, always return the sum. */
7379 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7380 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7382 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7383 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7384 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7386 rtx constant_part;
7388 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7389 EXPAND_SUM);
7390 /* Use immed_double_const to ensure that the constant is
7391 truncated according to the mode of OP1, then sign extended
7392 to a HOST_WIDE_INT. Using the constant directly can result
7393 in non-canonical RTL in a 64x32 cross compile. */
7394 constant_part
7395 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7396 (HOST_WIDE_INT) 0,
7397 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7398 op1 = plus_constant (op1, INTVAL (constant_part));
7399 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7400 op1 = force_operand (op1, target);
7401 return op1;
7404 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7405 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7406 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7408 rtx constant_part;
7410 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7411 EXPAND_SUM);
7412 if (! CONSTANT_P (op0))
7414 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7415 VOIDmode, modifier);
7416 /* Don't go to both_summands if modifier
7417 says it's not right to return a PLUS. */
7418 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7419 goto binop2;
7420 goto both_summands;
7422 /* Use immed_double_const to ensure that the constant is
7423 truncated according to the mode of OP0, then sign extended
7424 to a HOST_WIDE_INT. Using the constant directly can result
7425 in non-canonical RTL in a 64x32 cross compile. */
7426 constant_part
7427 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7428 (HOST_WIDE_INT) 0,
7429 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7430 op0 = plus_constant (op0, INTVAL (constant_part));
7431 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7432 op0 = force_operand (op0, target);
7433 return op0;
7437 /* No sense saving up arithmetic to be done
7438 if it's all in the wrong mode to form part of an address.
7439 And force_operand won't know whether to sign-extend or
7440 zero-extend. */
7441 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7442 || mode != ptr_mode)
7443 goto binop;
7445 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7446 subtarget = 0;
7448 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7449 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7451 both_summands:
7452 /* Make sure any term that's a sum with a constant comes last. */
7453 if (GET_CODE (op0) == PLUS
7454 && CONSTANT_P (XEXP (op0, 1)))
7456 temp = op0;
7457 op0 = op1;
7458 op1 = temp;
7460 /* If adding to a sum including a constant,
7461 associate it to put the constant outside. */
7462 if (GET_CODE (op1) == PLUS
7463 && CONSTANT_P (XEXP (op1, 1)))
7465 rtx constant_term = const0_rtx;
7467 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7468 if (temp != 0)
7469 op0 = temp;
7470 /* Ensure that MULT comes first if there is one. */
7471 else if (GET_CODE (op0) == MULT)
7472 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7473 else
7474 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7476 /* Let's also eliminate constants from op0 if possible. */
7477 op0 = eliminate_constant_term (op0, &constant_term);
7479 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7480 their sum should be a constant. Form it into OP1, since the
7481 result we want will then be OP0 + OP1. */
7483 temp = simplify_binary_operation (PLUS, mode, constant_term,
7484 XEXP (op1, 1));
7485 if (temp != 0)
7486 op1 = temp;
7487 else
7488 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7491 /* Put a constant term last and put a multiplication first. */
7492 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7493 temp = op1, op1 = op0, op0 = temp;
7495 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7496 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7498 case MINUS_EXPR:
7499 /* For initializers, we are allowed to return a MINUS of two
7500 symbolic constants. Here we handle all cases when both operands
7501 are constant. */
7502 /* Handle difference of two symbolic constants,
7503 for the sake of an initializer. */
7504 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7505 && really_constant_p (TREE_OPERAND (exp, 0))
7506 && really_constant_p (TREE_OPERAND (exp, 1)))
7508 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7509 VOIDmode, ro_modifier);
7510 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7511 VOIDmode, ro_modifier);
7513 /* If the last operand is a CONST_INT, use plus_constant of
7514 the negated constant. Else make the MINUS. */
7515 if (GET_CODE (op1) == CONST_INT)
7516 return plus_constant (op0, - INTVAL (op1));
7517 else
7518 return gen_rtx_MINUS (mode, op0, op1);
7520 /* Convert A - const to A + (-const). */
7521 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7523 tree negated = fold (build1 (NEGATE_EXPR, type,
7524 TREE_OPERAND (exp, 1)));
7526 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7527 /* If we can't negate the constant in TYPE, leave it alone and
7528 expand_binop will negate it for us. We used to try to do it
7529 here in the signed version of TYPE, but that doesn't work
7530 on POINTER_TYPEs. */;
7531 else
7533 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7534 goto plus_expr;
7537 this_optab = ! unsignedp && flag_trapv
7538 && (GET_MODE_CLASS(mode) == MODE_INT)
7539 ? subv_optab : sub_optab;
7540 goto binop;
7542 case MULT_EXPR:
7543 /* If first operand is constant, swap them.
7544 Thus the following special case checks need only
7545 check the second operand. */
7546 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7548 register tree t1 = TREE_OPERAND (exp, 0);
7549 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7550 TREE_OPERAND (exp, 1) = t1;
7553 /* Attempt to return something suitable for generating an
7554 indexed address, for machines that support that. */
7556 if (modifier == EXPAND_SUM && mode == ptr_mode
7557 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7558 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7560 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7561 EXPAND_SUM);
7563 /* Apply distributive law if OP0 is x+c. */
7564 if (GET_CODE (op0) == PLUS
7565 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7566 return
7567 gen_rtx_PLUS
7568 (mode,
7569 gen_rtx_MULT
7570 (mode, XEXP (op0, 0),
7571 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7572 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7573 * INTVAL (XEXP (op0, 1))));
7575 if (GET_CODE (op0) != REG)
7576 op0 = force_operand (op0, NULL_RTX);
7577 if (GET_CODE (op0) != REG)
7578 op0 = copy_to_mode_reg (mode, op0);
7580 return
7581 gen_rtx_MULT (mode, op0,
7582 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7585 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7586 subtarget = 0;
7588 /* Check for multiplying things that have been extended
7589 from a narrower type. If this machine supports multiplying
7590 in that narrower type with a result in the desired type,
7591 do it that way, and avoid the explicit type-conversion. */
7592 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7593 && TREE_CODE (type) == INTEGER_TYPE
7594 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7595 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7596 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7597 && int_fits_type_p (TREE_OPERAND (exp, 1),
7598 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7599 /* Don't use a widening multiply if a shift will do. */
7600 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7601 > HOST_BITS_PER_WIDE_INT)
7602 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7603 ||
7604 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7605 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7606 ==
7607 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7608 /* If both operands are extended, they must either both
7609 be zero-extended or both be sign-extended. */
7610 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7611 ==
7612 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7614 enum machine_mode innermode
7615 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7616 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7617 ? smul_widen_optab : umul_widen_optab);
7618 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7619 ? umul_widen_optab : smul_widen_optab);
7620 if (mode == GET_MODE_WIDER_MODE (innermode))
7622 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7624 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7625 NULL_RTX, VOIDmode, 0);
7626 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7627 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7628 VOIDmode, 0);
7629 else
7630 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7631 NULL_RTX, VOIDmode, 0);
7632 goto binop2;
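/* There is no widening multiply of the needed signedness, but if the
   opposite-signedness widening multiply exists and the operands fit in
   a word, use it and then correct the high half of the product.  */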
7634 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7635 && innermode == word_mode)
7637 rtx htem;
7638 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7639 NULL_RTX, VOIDmode, 0);
7640 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7641 op1 = convert_modes (innermode, mode,
7642 expand_expr (TREE_OPERAND (exp, 1),
7643 NULL_RTX, VOIDmode, 0),
7644 unsignedp);
7645 else
7646 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7647 NULL_RTX, VOIDmode, 0);
7648 temp = expand_binop (mode, other_optab, op0, op1, target,
7649 unsignedp, OPTAB_LIB_WIDEN);
7650 htem = expand_mult_highpart_adjust (innermode,
7651 gen_highpart (innermode, temp),
7652 op0, op1,
7653 gen_highpart (innermode, temp),
7654 unsignedp);
7655 emit_move_insn (gen_highpart (innermode, temp), htem);
7656 return temp;
7660 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7661 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7662 return expand_mult (mode, op0, op1, target, unsignedp);
7664 case TRUNC_DIV_EXPR:
7665 case FLOOR_DIV_EXPR:
7666 case CEIL_DIV_EXPR:
7667 case ROUND_DIV_EXPR:
7668 case EXACT_DIV_EXPR:
7669 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7670 subtarget = 0;
7671 /* Possible optimization: compute the dividend with EXPAND_SUM
7672 then if the divisor is constant can optimize the case
7673 where some terms of the dividend have coeffs divisible by it. */
7674 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7675 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7676 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7678 case RDIV_EXPR:
7679 this_optab = flodiv_optab;
7680 goto binop;
7682 case TRUNC_MOD_EXPR:
7683 case FLOOR_MOD_EXPR:
7684 case CEIL_MOD_EXPR:
7685 case ROUND_MOD_EXPR:
7686 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7687 subtarget = 0;
7688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7689 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7690 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7692 case FIX_ROUND_EXPR:
7693 case FIX_FLOOR_EXPR:
7694 case FIX_CEIL_EXPR:
7695 abort (); /* Not used for C. */
7697 case FIX_TRUNC_EXPR:
7698 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7699 if (target == 0)
7700 target = gen_reg_rtx (mode);
7701 expand_fix (target, op0, unsignedp);
7702 return target;
7704 case FLOAT_EXPR:
7705 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7706 if (target == 0)
7707 target = gen_reg_rtx (mode);
7708 /* expand_float can't figure out what to do if FROM has VOIDmode.
7709 So give it the correct mode. With -O, cse will optimize this. */
7710 if (GET_MODE (op0) == VOIDmode)
7711 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7712 op0);
7713 expand_float (target, op0,
7714 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7715 return target;
7717 case NEGATE_EXPR:
7718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7719 temp = expand_unop (mode,
7720 ! unsignedp && flag_trapv
7721 && (GET_MODE_CLASS(mode) == MODE_INT)
7722 ? negv_optab : neg_optab, op0, target, 0);
7723 if (temp == 0)
7724 abort ();
7725 return temp;
7727 case ABS_EXPR:
7728 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7730 /* Handle complex values specially. */
7731 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7732 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7733 return expand_complex_abs (mode, op0, target, unsignedp);
7735 /* Unsigned abs is simply the operand. Testing here means we don't
7736 risk generating incorrect code below. */
7737 if (TREE_UNSIGNED (type))
7738 return op0;
7740 return expand_abs (mode, op0, target, unsignedp,
7741 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7743 case MAX_EXPR:
7744 case MIN_EXPR:
7745 target = original_target;
7746 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7747 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7748 || GET_MODE (target) != mode
7749 || (GET_CODE (target) == REG
7750 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7751 target = gen_reg_rtx (mode);
7752 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7753 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7755 /* First try to do it with a special MIN or MAX instruction.
7756 If that does not win, use a conditional jump to select the proper
7757 value. */
7758 this_optab = (TREE_UNSIGNED (type)
7759 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7760 : (code == MIN_EXPR ? smin_optab : smax_optab));
7762 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7763 OPTAB_WIDEN);
7764 if (temp != 0)
7765 return temp;
7767 /* At this point, a MEM target is no longer useful; we will get better
7768 code without it. */
7770 if (GET_CODE (target) == MEM)
7771 target = gen_reg_rtx (mode);
7773 if (target != op0)
7774 emit_move_insn (target, op0);
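/* OP0 is no longer needed as a value; reuse it for the label that is
   reached when TARGET already holds the desired extremum.  */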
7776 op0 = gen_label_rtx ();
7778 /* If this mode is an integer too wide to compare properly,
7779 compare word by word. Rely on cse to optimize constant cases. */
7780 if (GET_MODE_CLASS (mode) == MODE_INT
7781 && ! can_compare_p (GE, mode, ccp_jump))
7783 if (code == MAX_EXPR)
7784 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7785 target, op1, NULL_RTX, op0);
7786 else
7787 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7788 op1, target, NULL_RTX, op0);
7790 else
7792 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7793 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7794 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7795 op0);
7797 emit_move_insn (target, op1);
7798 emit_label (op0);
7799 return target;
7801 case BIT_NOT_EXPR:
7802 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7803 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7804 if (temp == 0)
7805 abort ();
7806 return temp;
7808 case FFS_EXPR:
7809 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7810 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7811 if (temp == 0)
7812 abort ();
7813 return temp;
7815 /* ??? Can optimize bitwise operations with one arg constant.
7816 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7817 and (a bitwise1 b) bitwise2 b (etc)
7818 but that is probably not worthwhile. */
7820 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7821 boolean values when we want in all cases to compute both of them. In
7822 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7823 as actual zero-or-1 values and then bitwise anding. In cases where
7824 there cannot be any side effects, better code would be made by
7825 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7826 how to recognize those cases. */
7828 case TRUTH_AND_EXPR:
7829 case BIT_AND_EXPR:
7830 this_optab = and_optab;
7831 goto binop;
7833 case TRUTH_OR_EXPR:
7834 case BIT_IOR_EXPR:
7835 this_optab = ior_optab;
7836 goto binop;
7838 case TRUTH_XOR_EXPR:
7839 case BIT_XOR_EXPR:
7840 this_optab = xor_optab;
7841 goto binop;
7843 case LSHIFT_EXPR:
7844 case RSHIFT_EXPR:
7845 case LROTATE_EXPR:
7846 case RROTATE_EXPR:
7847 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7848 subtarget = 0;
7849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7850 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7851 unsignedp);
7853 /* Could determine the answer when only additive constants differ. Also,
7854 the addition of one can be handled by changing the condition. */
7855 case LT_EXPR:
7856 case LE_EXPR:
7857 case GT_EXPR:
7858 case GE_EXPR:
7859 case EQ_EXPR:
7860 case NE_EXPR:
7861 case UNORDERED_EXPR:
7862 case ORDERED_EXPR:
7863 case UNLT_EXPR:
7864 case UNLE_EXPR:
7865 case UNGT_EXPR:
7866 case UNGE_EXPR:
7867 case UNEQ_EXPR:
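/* First try to produce the comparison result directly with a
   store-flag insn.  */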
7868 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7869 if (temp != 0)
7870 return temp;
7872 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7873 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7874 && original_target
7875 && GET_CODE (original_target) == REG
7876 && (GET_MODE (original_target)
7877 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7879 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7880 VOIDmode, 0);
7882 if (temp != original_target)
7883 temp = copy_to_reg (temp);
7885 op1 = gen_label_rtx ();
7886 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7887 GET_MODE (temp), unsignedp, 0, op1);
7888 emit_move_insn (temp, const1_rtx);
7889 emit_label (op1);
7890 return temp;
7893 /* If no set-flag instruction, must generate a conditional
7894 store into a temporary variable. Drop through
7895 and handle this like && and ||. */
7897 case TRUTH_ANDIF_EXPR:
7898 case TRUTH_ORIF_EXPR:
7899 if (! ignore
7900 && (target == 0 || ! safe_from_p (target, exp, 1)
7901 /* Make sure we don't have a hard reg (such as function's return
7902 value) live across basic blocks, if not optimizing. */
7903 || (!optimize && GET_CODE (target) == REG
7904 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7905 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7907 if (target)
7908 emit_clr_insn (target);
7910 op1 = gen_label_rtx ();
7911 jumpifnot (exp, op1);
7913 if (target)
7914 emit_0_to_1_insn (target);
7916 emit_label (op1);
7917 return ignore ? const0_rtx : target;
7919 case TRUTH_NOT_EXPR:
7920 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7921 /* The parser is careful to generate TRUTH_NOT_EXPR
7922 only with operands that are always zero or one. */
7923 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7924 target, 1, OPTAB_LIB_WIDEN);
7925 if (temp == 0)
7926 abort ();
7927 return temp;
7929 case COMPOUND_EXPR:
7930 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7931 emit_queue ();
7932 return expand_expr (TREE_OPERAND (exp, 1),
7933 (ignore ? const0_rtx : target),
7934 VOIDmode, 0);
7936 case COND_EXPR:
7937 /* If we would have a "singleton" (see below) were it not for a
7938 conversion in each arm, bring that conversion back out. */
7939 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7940 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7941 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7942 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7944 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7945 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7947 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7948 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7949 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7950 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7951 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7952 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7953 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7954 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7955 return expand_expr (build1 (NOP_EXPR, type,
7956 build (COND_EXPR, TREE_TYPE (true),
7957 TREE_OPERAND (exp, 0),
7958 true, false)),
7959 target, tmode, modifier);
7963 /* Note that COND_EXPRs whose type is a structure or union
7964 are required to be constructed to contain assignments of
7965 a temporary variable, so that we can evaluate them here
7966 for side effect only. If type is void, we must do likewise. */
7968 /* If an arm of the branch requires a cleanup,
7969 only that cleanup is performed. */
7971 tree singleton = 0;
7972 tree binary_op = 0, unary_op = 0;
7974 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7975 convert it to our mode, if necessary. */
7976 if (integer_onep (TREE_OPERAND (exp, 1))
7977 && integer_zerop (TREE_OPERAND (exp, 2))
7978 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7980 if (ignore)
7982 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7983 ro_modifier);
7984 return const0_rtx;
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7988 if (GET_MODE (op0) == mode)
7989 return op0;
7991 if (target == 0)
7992 target = gen_reg_rtx (mode);
7993 convert_move (target, op0, unsignedp);
7994 return target;
7997 /* Check for X ? A + B : A. If we have this, we can copy A to the
7998 output and conditionally add B. Similarly for unary operations.
7999 Don't do this if X has side-effects because those side effects
8000 might affect A or B and the "?" operation is a sequence point in
8001 ANSI. (operand_equal_p tests for side effects.) */
8003 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8004 && operand_equal_p (TREE_OPERAND (exp, 2),
8005 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8006 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8007 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8008 && operand_equal_p (TREE_OPERAND (exp, 1),
8009 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8010 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8011 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8012 && operand_equal_p (TREE_OPERAND (exp, 2),
8013 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8014 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8015 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8016 && operand_equal_p (TREE_OPERAND (exp, 1),
8017 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8018 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8020 /* If we are not to produce a result, we have no target. Otherwise,
8021 if a target was specified use it; it will not be used as an
8022 intermediate target unless it is safe. If no target, use a
8023 temporary. */
8025 if (ignore)
8026 temp = 0;
8027 else if (original_target
8028 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8029 || (singleton && GET_CODE (original_target) == REG
8030 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8031 && original_target == var_rtx (singleton)))
8032 && GET_MODE (original_target) == mode
8033 #ifdef HAVE_conditional_move
8034 && (! can_conditionally_move_p (mode)
8035 || GET_CODE (original_target) == REG
8036 || TREE_ADDRESSABLE (type))
8037 #endif
8038 && ! (GET_CODE (original_target) == MEM
8039 && MEM_VOLATILE_P (original_target)))
8040 temp = original_target;
8041 else if (TREE_ADDRESSABLE (type))
8042 abort ();
8043 else
8044 temp = assign_temp (type, 0, 0, 1);
8046 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8047 do the test of X as a store-flag operation, do this as
8048 A + ((X != 0) << log C). Similarly for other simple binary
8049 operators. Only do this for C == 1 if BRANCH_COST is low. */
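/* For instance, "x ? a + 4 : a" can become "a + ((x != 0) << 2)".  */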
8050 if (temp && singleton && binary_op
8051 && (TREE_CODE (binary_op) == PLUS_EXPR
8052 || TREE_CODE (binary_op) == MINUS_EXPR
8053 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8054 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8055 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8056 : integer_onep (TREE_OPERAND (binary_op, 1)))
8057 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8059 rtx result;
8060 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8061 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8062 ? addv_optab : add_optab)
8063 : TREE_CODE (binary_op) == MINUS_EXPR
8064 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8065 ? subv_optab : sub_optab)
8066 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8067 : xor_optab);
8069 /* If we had X ? A : A + 1, do this as A + (X == 0).
8071 We have to invert the truth value here and then put it
8072 back later if do_store_flag fails. We cannot simply copy
8073 TREE_OPERAND (exp, 0) to another variable and modify that
8074 because invert_truthvalue can modify the tree pointed to
8075 by its argument. */
8076 if (singleton == TREE_OPERAND (exp, 1))
8077 TREE_OPERAND (exp, 0)
8078 = invert_truthvalue (TREE_OPERAND (exp, 0));
8080 result = do_store_flag (TREE_OPERAND (exp, 0),
8081 (safe_from_p (temp, singleton, 1)
8082 ? temp : NULL_RTX),
8083 mode, BRANCH_COST <= 1);
8085 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8086 result = expand_shift (LSHIFT_EXPR, mode, result,
8087 build_int_2 (tree_log2
8088 (TREE_OPERAND
8089 (binary_op, 1)),
8090 0),
8091 (safe_from_p (temp, singleton, 1)
8092 ? temp : NULL_RTX), 0);
8094 if (result)
8096 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8097 return expand_binop (mode, boptab, op1, result, temp,
8098 unsignedp, OPTAB_LIB_WIDEN);
8100 else if (singleton == TREE_OPERAND (exp, 1))
8101 TREE_OPERAND (exp, 0)
8102 = invert_truthvalue (TREE_OPERAND (exp, 0));
8105 do_pending_stack_adjust ();
8106 NO_DEFER_POP;
8107 op0 = gen_label_rtx ();
8109 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8111 if (temp != 0)
8113 /* If the target conflicts with the other operand of the
8114 binary op, we can't use it. Also, we can't use the target
8115 if it is a hard register, because evaluating the condition
8116 might clobber it. */
8117 if ((binary_op
8118 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8119 || (GET_CODE (temp) == REG
8120 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8121 temp = gen_reg_rtx (mode);
8122 store_expr (singleton, temp, 0);
8124 else
8125 expand_expr (singleton,
8126 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8127 if (singleton == TREE_OPERAND (exp, 1))
8128 jumpif (TREE_OPERAND (exp, 0), op0);
8129 else
8130 jumpifnot (TREE_OPERAND (exp, 0), op0);
8132 start_cleanup_deferral ();
8133 if (binary_op && temp == 0)
8134 /* Just touch the other operand. */
8135 expand_expr (TREE_OPERAND (binary_op, 1),
8136 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8137 else if (binary_op)
8138 store_expr (build (TREE_CODE (binary_op), type,
8139 make_tree (type, temp),
8140 TREE_OPERAND (binary_op, 1)),
8141 temp, 0);
8142 else
8143 store_expr (build1 (TREE_CODE (unary_op), type,
8144 make_tree (type, temp)),
8145 temp, 0);
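/* The label in OP0 doubles as the join point here, so make it the
   final label as well.  */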
8146 op1 = op0;
8148 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8149 comparison operator. If we have one of these cases, set the
8150 output to A, branch on A (cse will merge these two references),
8151 then set the output to FOO. */
8152 else if (temp
8153 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8154 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8155 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8156 TREE_OPERAND (exp, 1), 0)
8157 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8158 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8159 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8161 if (GET_CODE (temp) == REG
8162 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8163 temp = gen_reg_rtx (mode);
8164 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8165 jumpif (TREE_OPERAND (exp, 0), op0);
8167 start_cleanup_deferral ();
8168 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8169 op1 = op0;
8171 else if (temp
8172 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8173 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8174 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8175 TREE_OPERAND (exp, 2), 0)
8176 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8177 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8178 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8180 if (GET_CODE (temp) == REG
8181 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8182 temp = gen_reg_rtx (mode);
8183 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8184 jumpifnot (TREE_OPERAND (exp, 0), op0);
8186 start_cleanup_deferral ();
8187 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8188 op1 = op0;
8190 else
8192 op1 = gen_label_rtx ();
8193 jumpifnot (TREE_OPERAND (exp, 0), op0);
8195 start_cleanup_deferral ();
8197 /* One branch of the cond can be void, if it never returns. For
8198 example A ? throw : E. */
8199 if (temp != 0
8200 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8201 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8202 else
8203 expand_expr (TREE_OPERAND (exp, 1),
8204 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8205 end_cleanup_deferral ();
8206 emit_queue ();
8207 emit_jump_insn (gen_jump (op1));
8208 emit_barrier ();
8209 emit_label (op0);
8210 start_cleanup_deferral ();
8211 if (temp != 0
8212 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8213 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8214 else
8215 expand_expr (TREE_OPERAND (exp, 2),
8216 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8219 end_cleanup_deferral ();
8221 emit_queue ();
8222 emit_label (op1);
8223 OK_DEFER_POP;
8225 return temp;
8228 case TARGET_EXPR:
8230 /* Something needs to be initialized, but we didn't know
8231 where that thing was when building the tree. For example,
8232 it could be the return value of a function, or a parameter
8233 to a function which is laid down on the stack, or a temporary
8234 variable which must be passed by reference.
8236 We guarantee that the expression will either be constructed
8237 or copied into our original target. */
8239 tree slot = TREE_OPERAND (exp, 0);
8240 tree cleanups = NULL_TREE;
8241 tree exp1;
8243 if (TREE_CODE (slot) != VAR_DECL)
8244 abort ();
8246 if (! ignore)
8247 target = original_target;
8249 /* Set this here so that if we get a target that refers to a
8250 register variable that's already been used, put_reg_into_stack
8251 knows that it should fix up those uses. */
8252 TREE_USED (slot) = 1;
8254 if (target == 0)
8256 if (DECL_RTL (slot) != 0)
8258 target = DECL_RTL (slot);
8259 /* We have already expanded the slot, so don't do
8260 it again. (mrs) */
8261 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8262 return target;
8264 else
8266 target = assign_temp (type, 2, 0, 1);
8267 /* All temp slots at this level must not conflict. */
8268 preserve_temp_slots (target);
8269 DECL_RTL (slot) = target;
8270 if (TREE_ADDRESSABLE (slot))
8271 put_var_into_stack (slot);
8273 /* Since SLOT is not known to the called function
8274 to belong to its stack frame, we must build an explicit
8275 cleanup. This case occurs when we must build up a reference
8276 to pass the reference as an argument. In this case,
8277 it is very likely that such a reference need not be
8278 built here. */
8280 if (TREE_OPERAND (exp, 2) == 0)
8281 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8282 cleanups = TREE_OPERAND (exp, 2);
8285 else
8287 /* This case does occur, when expanding a parameter which
8288 needs to be constructed on the stack. The target
8289 is the actual stack address that we want to initialize.
8290 The function we call will perform the cleanup in this case. */
8292 /* If we have already assigned it space, use that space,
8293 not the target that we were passed in, as our target
8294 parameter is only a hint. */
8295 if (DECL_RTL (slot) != 0)
8297 target = DECL_RTL (slot);
8298 /* We have already expanded the slot, so don't do
8299 it again. (mrs) */
8300 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8301 return target;
8303 else
8305 DECL_RTL (slot) = target;
8306 /* If we must have an addressable slot, then make sure that
8307 the RTL that we just stored in slot is OK. */
8308 if (TREE_ADDRESSABLE (slot))
8309 put_var_into_stack (slot);
8313 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8314 /* Mark it as expanded. */
8315 TREE_OPERAND (exp, 1) = NULL_TREE;
8317 store_expr (exp1, target, 0);
8319 expand_decl_cleanup (NULL_TREE, cleanups);
8321 return target;
8324 case INIT_EXPR:
8326 tree lhs = TREE_OPERAND (exp, 0);
8327 tree rhs = TREE_OPERAND (exp, 1);
8328 tree noncopied_parts = 0;
8329 tree lhs_type = TREE_TYPE (lhs);
8331 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8332 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8333 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8334 TYPE_NONCOPIED_PARTS (lhs_type));
8335 while (noncopied_parts != 0)
8337 expand_assignment (TREE_VALUE (noncopied_parts),
8338 TREE_PURPOSE (noncopied_parts), 0, 0);
8339 noncopied_parts = TREE_CHAIN (noncopied_parts);
8341 return temp;
8344 case MODIFY_EXPR:
8346 /* If lhs is complex, expand calls in rhs before computing it.
8347 That's so we don't compute a pointer and save it over a call.
8348 If lhs is simple, compute it first so we can give it as a
8349 target if the rhs is just a call. This avoids an extra temp and copy,
8350 and prevents a partial subsumption which makes bad code.
8351 Actually we could treat component_ref's of vars like vars. */
8353 tree lhs = TREE_OPERAND (exp, 0);
8354 tree rhs = TREE_OPERAND (exp, 1);
8355 tree noncopied_parts = 0;
8356 tree lhs_type = TREE_TYPE (lhs);
8358 temp = 0;
8360 if (TREE_CODE (lhs) != VAR_DECL
8361 && TREE_CODE (lhs) != RESULT_DECL
8362 && TREE_CODE (lhs) != PARM_DECL
8363 && ! (TREE_CODE (lhs) == INDIRECT_REF
8364 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8366 /* Check for |= or &= of a bitfield of size one into another bitfield
8367 of size 1. In this case, (unless we need the result of the
8368 assignment) we can do this more efficiently with a
8369 test followed by an assignment, if necessary.
8371 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8372 things change so we do, this code should be enhanced to
8373 support it. */
8374 if (ignore
8375 && TREE_CODE (lhs) == COMPONENT_REF
8376 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8377 || TREE_CODE (rhs) == BIT_AND_EXPR)
8378 && TREE_OPERAND (rhs, 0) == lhs
8379 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8380 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8381 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8383 rtx label = gen_label_rtx ();
8385 do_jump (TREE_OPERAND (rhs, 1),
8386 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8387 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8388 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8389 (TREE_CODE (rhs) == BIT_IOR_EXPR
8390 ? integer_one_node
8391 : integer_zero_node)),
8392 0, 0);
8393 do_pending_stack_adjust ();
8394 emit_label (label);
8395 return const0_rtx;
8398 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8399 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8400 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8401 TYPE_NONCOPIED_PARTS (lhs_type));
8403 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8404 while (noncopied_parts != 0)
8406 expand_assignment (TREE_PURPOSE (noncopied_parts),
8407 TREE_VALUE (noncopied_parts), 0, 0);
8408 noncopied_parts = TREE_CHAIN (noncopied_parts);
8410 return temp;
8413 case RETURN_EXPR:
8414 if (!TREE_OPERAND (exp, 0))
8415 expand_null_return ();
8416 else
8417 expand_return (TREE_OPERAND (exp, 0));
8418 return const0_rtx;
8420 case PREINCREMENT_EXPR:
8421 case PREDECREMENT_EXPR:
8422 return expand_increment (exp, 0, ignore);
8424 case POSTINCREMENT_EXPR:
8425 case POSTDECREMENT_EXPR:
8426 /* Faster to treat as pre-increment if result is not used. */
8427 return expand_increment (exp, ! ignore, ignore);
8429 case ADDR_EXPR:
8430 /* If nonzero, TEMP will be set to the address of something that might
8431 be a MEM corresponding to a stack slot. */
8432 temp = 0;
8434 /* Are we taking the address of a nested function? */
8435 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8436 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8437 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8438 && ! TREE_STATIC (exp))
8440 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8441 op0 = force_operand (op0, target);
8443 /* If we are taking the address of something erroneous, just
8444 return a zero. */
8445 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8446 return const0_rtx;
8447 else
8449 /* We make sure to pass const0_rtx down if we came in with
8450 ignore set, to avoid doing the cleanups twice for something. */
8451 op0 = expand_expr (TREE_OPERAND (exp, 0),
8452 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8453 (modifier == EXPAND_INITIALIZER
8454 ? modifier : EXPAND_CONST_ADDRESS));
8456 /* If we are going to ignore the result, OP0 will have been set
8457 to const0_rtx, so just return it. Don't get confused and
8458 think we are taking the address of the constant. */
8459 if (ignore)
8460 return op0;
8462 op0 = protect_from_queue (op0, 0);
8464 /* We would like the object in memory. If it is a constant, we can
8465 have it be statically allocated into memory. For a non-constant,
8466 we need to allocate some memory and store the value into it. */
8468 if (CONSTANT_P (op0))
8469 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8470 op0);
8471 else if (GET_CODE (op0) == MEM)
8473 mark_temp_addr_taken (op0);
8474 temp = XEXP (op0, 0);
8477 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8478 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8479 || GET_CODE (op0) == PARALLEL)
8481 /* If this object is in a register, it must not
8482 be BLKmode. */
8483 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8484 tree nt = build_qualified_type (inner_type,
8485 (TYPE_QUALS (inner_type)
8486 | TYPE_QUAL_CONST));
8487 rtx memloc = assign_temp (nt, 1, 1, 1);
8489 mark_temp_addr_taken (memloc);
8490 if (GET_CODE (op0) == PARALLEL)
8491 /* Handle calls that pass values in multiple non-contiguous
8492 locations. The Irix 6 ABI has examples of this. */
8493 emit_group_store (memloc, op0,
8494 int_size_in_bytes (inner_type),
8495 TYPE_ALIGN (inner_type));
8496 else
8497 emit_move_insn (memloc, op0);
8498 op0 = memloc;
8501 if (GET_CODE (op0) != MEM)
8502 abort ();
8504 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8506 temp = XEXP (op0, 0);
8507 #ifdef POINTERS_EXTEND_UNSIGNED
8508 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8509 && mode == ptr_mode)
8510 temp = convert_memory_address (ptr_mode, temp);
8511 #endif
8512 return temp;
8515 op0 = force_operand (XEXP (op0, 0), target);
8518 if (flag_force_addr && GET_CODE (op0) != REG)
8519 op0 = force_reg (Pmode, op0);
8521 if (GET_CODE (op0) == REG
8522 && ! REG_USERVAR_P (op0))
8523 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8525 /* If we might have had a temp slot, add an equivalent address
8526 for it. */
8527 if (temp != 0)
8528 update_temp_slot_address (temp, op0);
8530 #ifdef POINTERS_EXTEND_UNSIGNED
8531 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8532 && mode == ptr_mode)
8533 op0 = convert_memory_address (ptr_mode, op0);
8534 #endif
8536 return op0;
8538 case ENTRY_VALUE_EXPR:
8539 abort ();
8541 /* COMPLEX type for Extended Pascal & Fortran */
8542 case COMPLEX_EXPR:
8544 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8545 rtx insns;
8547 /* Get the rtx code of the operands. */
8548 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8549 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8551 if (! target)
8552 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8554 start_sequence ();
8556 /* Move the real (op0) and imaginary (op1) parts to their location. */
8557 emit_move_insn (gen_realpart (mode, target), op0);
8558 emit_move_insn (gen_imagpart (mode, target), op1);
8560 insns = get_insns ();
8561 end_sequence ();
8563 /* Complex construction should appear as a single unit. */
8564 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8565 each with a separate pseudo as destination.
8566 It's not correct for flow to treat them as a unit. */
8567 if (GET_CODE (target) != CONCAT)
8568 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8569 else
8570 emit_insns (insns);
8572 return target;
8575 case REALPART_EXPR:
8576 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8577 return gen_realpart (mode, op0);
8579 case IMAGPART_EXPR:
8580 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8581 return gen_imagpart (mode, op0);
8583 case CONJ_EXPR:
8585 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8586 rtx imag_t;
8587 rtx insns;
8589 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8591 if (! target)
8592 target = gen_reg_rtx (mode);
8594 start_sequence ();
8596 /* Store the realpart and the negated imagpart to target. */
8597 emit_move_insn (gen_realpart (partmode, target),
8598 gen_realpart (partmode, op0));
8600 imag_t = gen_imagpart (partmode, target);
8601 temp = expand_unop (partmode,
8602 ! unsignedp && flag_trapv
8603 && (GET_MODE_CLASS(partmode) == MODE_INT)
8604 ? negv_optab : neg_optab,
8605 gen_imagpart (partmode, op0), imag_t, 0);
8606 if (temp != imag_t)
8607 emit_move_insn (imag_t, temp);
8609 insns = get_insns ();
8610 end_sequence ();
8612 /* Conjugate should appear as a single unit.
8613 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8614 each with a separate pseudo as destination.
8615 It's not correct for flow to treat them as a unit. */
8616 if (GET_CODE (target) != CONCAT)
8617 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8618 else
8619 emit_insns (insns);
8621 return target;
8624 case TRY_CATCH_EXPR:
8626 tree handler = TREE_OPERAND (exp, 1);
8628 expand_eh_region_start ();
8630 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8632 expand_eh_region_end (handler);
8634 return op0;
8637 case TRY_FINALLY_EXPR:
8639 tree try_block = TREE_OPERAND (exp, 0);
8640 tree finally_block = TREE_OPERAND (exp, 1);
8641 rtx finally_label = gen_label_rtx ();
8642 rtx done_label = gen_label_rtx ();
8643 rtx return_link = gen_reg_rtx (Pmode);
8644 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8645 (tree) finally_label, (tree) return_link);
8646 TREE_SIDE_EFFECTS (cleanup) = 1;
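/* The FINALLY block is expanded as a subroutine: every path that
   leaves the TRY block reaches it through the cleanup below and then
   resumes at the address saved in RETURN_LINK.  */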
8648 /* Start a new binding layer that will keep track of all cleanup
8649 actions to be performed. */
8650 expand_start_bindings (2);
8652 target_temp_slot_level = temp_slot_level;
8654 expand_decl_cleanup (NULL_TREE, cleanup);
8655 op0 = expand_expr (try_block, target, tmode, modifier);
8657 preserve_temp_slots (op0);
8658 expand_end_bindings (NULL_TREE, 0, 0);
8659 emit_jump (done_label);
8660 emit_label (finally_label);
8661 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8662 emit_indirect_jump (return_link);
8663 emit_label (done_label);
8664 return op0;
8667 case GOTO_SUBROUTINE_EXPR:
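/* Save the address of RETURN_ADDRESS in RETURN_LINK and jump to the
   subroutine; it returns by jumping back through RETURN_LINK.  */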
8669 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8670 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8671 rtx return_address = gen_label_rtx ();
8672 emit_move_insn (return_link,
8673 gen_rtx_LABEL_REF (Pmode, return_address));
8674 emit_jump (subr);
8675 emit_label (return_address);
8676 return const0_rtx;
8679 case POPDCC_EXPR:
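/* Pop the topmost entry off the dynamic cleanup chain.  */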
8681 rtx dcc = get_dynamic_cleanup_chain ();
8682 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8683 return const0_rtx;
8686 case POPDHC_EXPR:
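/* Likewise, pop the topmost entry off the dynamic handler chain.  */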
8688 rtx dhc = get_dynamic_handler_chain ();
8689 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8690 return const0_rtx;
8693 case VA_ARG_EXPR:
8694 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8696 default:
8697 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8700 /* Here to do an ordinary binary operator, generating an instruction
8701 from the optab already placed in `this_optab'. */
8702 binop:
8703 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8704 subtarget = 0;
8705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8706 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8707 binop2:
8708 temp = expand_binop (mode, this_optab, op0, op1, target,
8709 unsignedp, OPTAB_LIB_WIDEN);
8710 if (temp == 0)
8711 abort ();
8712 return temp;
8715 /* Similar to expand_expr, except that we don't specify a target, target
8716 mode, or modifier and we return the alignment of the inner type. This is
8717 used in cases where it is not necessary to align the result to the
8718 alignment of its type as long as we know the alignment of the result, for
8719 example for comparisons of BLKmode values. */
8721 static rtx
8722 expand_expr_unaligned (exp, palign)
8723 register tree exp;
8724 unsigned int *palign;
8726 register rtx op0;
8727 tree type = TREE_TYPE (exp);
8728 register enum machine_mode mode = TYPE_MODE (type);
8730 /* Default the alignment we return to that of the type. */
8731 *palign = TYPE_ALIGN (type);
8733 /* The only case in which we do anything special is if the resulting mode
8734 is BLKmode. */
8735 if (mode != BLKmode)
8736 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8738 switch (TREE_CODE (exp))
8740 case CONVERT_EXPR:
8741 case NOP_EXPR:
8742 case NON_LVALUE_EXPR:
8743 /* Conversions between BLKmode values don't change the underlying
8744 alignment or value. */
8745 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8746 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8747 break;
8749 case ARRAY_REF:
8750 /* Much of the code for this case is copied directly from expand_expr.
8751 We need to duplicate it here because we will do something different
8752 in the fall-through case, so we need to handle the same exceptions
8753 it does. */
8755 tree array = TREE_OPERAND (exp, 0);
8756 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8757 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8758 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8759 HOST_WIDE_INT i;
8761 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8762 abort ();
8764 /* Optimize the special-case of a zero lower bound.
8766 We convert the low_bound to sizetype to avoid some problems
8767 with constant folding. (E.g. suppose the lower bound is 1,
8768 and its mode is QI. Without the conversion, (ARRAY
8769 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8770 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8772 if (! integer_zerop (low_bound))
8773 index = size_diffop (index, convert (sizetype, low_bound));
8775 /* If this is a constant index into a constant array,
8776 just get the value from the array. Handle both the cases when
8777 we have an explicit constructor and when our operand is a variable
8778 that was declared const. */
8780 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8781 && host_integerp (index, 0)
8782 && 0 > compare_tree_int (index,
8783 list_length (CONSTRUCTOR_ELTS
8784 (TREE_OPERAND (exp, 0)))))
8786 tree elem;
8788 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8789 i = tree_low_cst (index, 0);
8790 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8793 if (elem)
8794 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8797 else if (optimize >= 1
8798 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8799 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8800 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8802 if (TREE_CODE (index) == INTEGER_CST)
8804 tree init = DECL_INITIAL (array);
8806 if (TREE_CODE (init) == CONSTRUCTOR)
8808 tree elem;
8810 for (elem = CONSTRUCTOR_ELTS (init);
8811 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8812 elem = TREE_CHAIN (elem))
8815 if (elem)
8816 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8817 palign);
8822 /* Fall through. */
8824 case COMPONENT_REF:
8825 case BIT_FIELD_REF:
8826 /* If the operand is a CONSTRUCTOR, we can just extract the
8827 appropriate field if it is present. Don't do this if we have
8828 already written the data since we want to refer to that copy
8829 and varasm.c assumes that's what we'll do. */
8830 if (TREE_CODE (exp) != ARRAY_REF
8831 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8832 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8834 tree elt;
8836 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8837 elt = TREE_CHAIN (elt))
8838 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8839 /* Note that unlike the case in expand_expr, we know this is
8840 BLKmode and hence not an integer. */
8841 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8845 enum machine_mode mode1;
8846 HOST_WIDE_INT bitsize, bitpos;
8847 tree offset;
8848 int volatilep = 0;
8849 unsigned int alignment;
8850 int unsignedp;
8851 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8852 &mode1, &unsignedp, &volatilep,
8853 &alignment);
8855 /* If we got back the original object, something is wrong. Perhaps
8856 we are evaluating an expression too early. In any event, don't
8857 infinitely recurse. */
8858 if (tem == exp)
8859 abort ();
8861 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8863 /* If this is a constant, put it into a register if it is a
8864 legitimate constant and OFFSET is 0 and memory if it isn't. */
8865 if (CONSTANT_P (op0))
8867 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8869 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8870 && offset == 0)
8871 op0 = force_reg (inner_mode, op0);
8872 else
8873 op0 = validize_mem (force_const_mem (inner_mode, op0));
8876 if (offset != 0)
8878 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8880 /* If this object is in a register, put it into memory.
8881 This case can't occur in C, but can in Ada if we have
8882 unchecked conversion of an expression from a scalar type to
8883 an array or record type. */
8884 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8885 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8887 tree nt = build_qualified_type (TREE_TYPE (tem),
8888 (TYPE_QUALS (TREE_TYPE (tem))
8889 | TYPE_QUAL_CONST));
8890 rtx memloc = assign_temp (nt, 1, 1, 1);
8892 mark_temp_addr_taken (memloc);
8893 emit_move_insn (memloc, op0);
8894 op0 = memloc;
8897 if (GET_CODE (op0) != MEM)
8898 abort ();
8900 if (GET_MODE (offset_rtx) != ptr_mode)
8902 #ifdef POINTERS_EXTEND_UNSIGNED
8903 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8904 #else
8905 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8906 #endif
8909 op0 = change_address (op0, VOIDmode,
8910 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8911 force_reg (ptr_mode,
8912 offset_rtx)));
8915 /* Don't forget about volatility even if this is a bitfield. */
8916 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8918 op0 = copy_rtx (op0);
8919 MEM_VOLATILE_P (op0) = 1;
8922 /* Check the access. */
8923 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8925 rtx to;
8926 int size;
8928 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8929 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8931 /* Check the access right of the pointer. */
8932 in_check_memory_usage = 1;
8933 if (size > BITS_PER_UNIT)
8934 emit_library_call (chkr_check_addr_libfunc,
8935 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8936 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8937 TYPE_MODE (sizetype),
8938 GEN_INT (MEMORY_USE_RO),
8939 TYPE_MODE (integer_type_node));
8940 in_check_memory_usage = 0;
8943 /* In cases where an aligned union has an unaligned object
8944 as a field, we might be extracting a BLKmode value from
8945 an integer-mode (e.g., SImode) object. Handle this case
8946 by doing the extract into an object as wide as the field
8947 (which we know to be the width of a basic mode), then
8948 storing into memory, and changing the mode to BLKmode.
8949 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8950 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8951 if (mode1 == VOIDmode
8952 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8953 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8954 && (TYPE_ALIGN (type) > alignment
8955 || bitpos % TYPE_ALIGN (type) != 0)))
8957 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8959 if (ext_mode == BLKmode)
8961 /* In this case, BITPOS must start at a byte boundary. */
8962 if (GET_CODE (op0) != MEM
8963 || bitpos % BITS_PER_UNIT != 0)
8964 abort ();
8966 op0 = change_address (op0, VOIDmode,
8967 plus_constant (XEXP (op0, 0),
8968 bitpos / BITS_PER_UNIT));
8970 else
8972 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8973 TYPE_QUAL_CONST);
8974 rtx new = assign_temp (nt, 0, 1, 1);
8976 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8977 unsignedp, NULL_RTX, ext_mode,
8978 ext_mode, alignment,
8979 int_size_in_bytes (TREE_TYPE (tem)));
8981 /* If the result is a record type and BITSIZE is narrower than
8982 the mode of OP0, an integral mode, and this is a big endian
8983 machine, we must put the field into the high-order bits. */
8984 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8985 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8986 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8987 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8988 size_int (GET_MODE_BITSIZE
8989 (GET_MODE (op0))
8990 - bitsize),
8991 op0, 1);
8993 emit_move_insn (new, op0);
8994 op0 = copy_rtx (new);
8995 PUT_MODE (op0, BLKmode);
8998 else
8999 /* Get a reference to just this component. */
9000 op0 = change_address (op0, mode1,
9001 plus_constant (XEXP (op0, 0),
9002 (bitpos / BITS_PER_UNIT)));
9004 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9006 /* Adjust the alignment in case the bit position is not
9007 a multiple of the alignment of the inner object. */
9008 while (bitpos % alignment != 0)
9009 alignment >>= 1;
9011 if (GET_CODE (XEXP (op0, 0)) == REG)
9012 mark_reg_pointer (XEXP (op0, 0), alignment);
9014 MEM_IN_STRUCT_P (op0) = 1;
9015 MEM_VOLATILE_P (op0) |= volatilep;
9017 *palign = alignment;
9018 return op0;
9021 default:
9022 break;
9026 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9029 /* Return the tree node if ARG corresponds to a string constant or zero
9030 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9031 in bytes within the string that ARG is accessing. The type of the
9032 offset will be `sizetype'. */
9034 tree
9035 string_constant (arg, ptr_offset)
9036 tree arg;
9037 tree *ptr_offset;
9039 STRIP_NOPS (arg);
9041 if (TREE_CODE (arg) == ADDR_EXPR
9042 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9044 *ptr_offset = size_zero_node;
9045 return TREE_OPERAND (arg, 0);
9047 else if (TREE_CODE (arg) == PLUS_EXPR)
9049 tree arg0 = TREE_OPERAND (arg, 0);
9050 tree arg1 = TREE_OPERAND (arg, 1);
9052 STRIP_NOPS (arg0);
9053 STRIP_NOPS (arg1);
9055 if (TREE_CODE (arg0) == ADDR_EXPR
9056 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9058 *ptr_offset = convert (sizetype, arg1);
9059 return TREE_OPERAND (arg0, 0);
9061 else if (TREE_CODE (arg1) == ADDR_EXPR
9062 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9064 *ptr_offset = convert (sizetype, arg0);
9065 return TREE_OPERAND (arg1, 0);
9069 return 0;
9072 /* Expand code for a post- or pre- increment or decrement
9073 and return the RTX for the result.
9074 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
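/* For illustration: when expanding `y = x++;' POST is 1 and the RTX returned
   holds the old value of `x' (the increment itself may be queued); when
   expanding `y = ++x;' POST is 0 and the returned RTX holds the incremented
   value.  IGNORE is nonzero when the value is unused, as in a bare `x++;'
   statement.  */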
9076 static rtx
9077 expand_increment (exp, post, ignore)
9078 register tree exp;
9079 int post, ignore;
9081 register rtx op0, op1;
9082 register rtx temp, value;
9083 register tree incremented = TREE_OPERAND (exp, 0);
9084 optab this_optab = add_optab;
9085 int icode;
9086 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9087 int op0_is_copy = 0;
9088 int single_insn = 0;
9089 /* 1 means we can't store into OP0 directly,
9090 because it is a subreg narrower than a word,
9091 and we don't dare clobber the rest of the word. */
9092 int bad_subreg = 0;
9094 /* Stabilize any component ref that might need to be
9095 evaluated more than once below. */
9096 if (!post
9097 || TREE_CODE (incremented) == BIT_FIELD_REF
9098 || (TREE_CODE (incremented) == COMPONENT_REF
9099 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9100 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9101 incremented = stabilize_reference (incremented);
9102 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9103 ones into save exprs so that they don't accidentally get evaluated
9104 more than once by the code below. */
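/* For illustration: `++++x' is valid C++ and parses as a PREINCREMENT_EXPR
   whose operand is another PREINCREMENT_EXPR; wrapping the inner node in a
   SAVE_EXPR keeps it from being expanded a second time when the outer
   increment builds its PLUS_EXPR below.  */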
9105 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9106 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9107 incremented = save_expr (incremented);
9109 /* Compute the operands as RTX.
9110 Note whether OP0 is the actual lvalue or a copy of it:
9111 I believe it is a copy iff it is a register or subreg
9112 and insns were generated in computing it. */
9114 temp = get_last_insn ();
9115 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9117 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9118 in place but instead must do sign- or zero-extension during assignment,
9119 so we copy it into a new register and let the code below use it as
9120 a copy.
9122 Note that we can safely modify this SUBREG since it is known not to be
9123 shared (it was made by the expand_expr call above). */
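/* For illustration: such a promoted SUBREG typically comes from a target that
   defines PROMOTE_MODE, where e.g. a `short' variable is kept in a full-word
   register as (subreg:HI (reg:SI ...)); incrementing it in place would leave
   the upper bits of the register stale, so the extension must happen through
   a normal assignment instead.  */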
9125 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9127 if (post)
9128 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9129 else
9130 bad_subreg = 1;
9132 else if (GET_CODE (op0) == SUBREG
9133 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9135 /* We cannot increment this SUBREG in place. If we are
9136 post-incrementing, get a copy of the old value. Otherwise,
9137 just mark that we cannot increment in place. */
9138 if (post)
9139 op0 = copy_to_reg (op0);
9140 else
9141 bad_subreg = 1;
9144 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9145 && temp != get_last_insn ());
9146 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9147 EXPAND_MEMORY_USE_BAD);
9149 /* Decide whether incrementing or decrementing. */
9150 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9151 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9152 this_optab = sub_optab;
9154 /* Convert decrement by a constant into a negative increment. */
9155 if (this_optab == sub_optab
9156 && GET_CODE (op1) == CONST_INT)
9158 op1 = GEN_INT (-INTVAL (op1));
9159 this_optab = add_optab;
9162 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9163 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9165 /* For a preincrement, see if we can do this with a single instruction. */
9166 if (!post)
9168 icode = (int) this_optab->handlers[(int) mode].insn_code;
9169 if (icode != (int) CODE_FOR_nothing
9170 /* Make sure that OP0 is valid for operands 0 and 1
9171 of the insn we want to queue. */
9172 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9173 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9174 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9175 single_insn = 1;
9178 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9179 then we cannot just increment OP0. We must therefore contrive to
9180 increment the original value. Then, for postincrement, we can return
9181 OP0 since it is a copy of the old value. For preincrement, expand here
9182 unless we can do it with a single insn.
9184 Likewise if storing directly into OP0 would clobber high bits
9185 we need to preserve (bad_subreg). */
9186 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9188 /* This is the easiest way to increment the value wherever it is.
9189 Problems with multiple evaluation of INCREMENTED are prevented
9190 because either (1) it is a component_ref or preincrement,
9191 in which case it was stabilized above, or (2) it is an array_ref
9192 with constant index in an array in a register, which is
9193 safe to reevaluate. */
9194 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9195 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9196 ? MINUS_EXPR : PLUS_EXPR),
9197 TREE_TYPE (exp),
9198 incremented,
9199 TREE_OPERAND (exp, 1));
9201 while (TREE_CODE (incremented) == NOP_EXPR
9202 || TREE_CODE (incremented) == CONVERT_EXPR)
9204 newexp = convert (TREE_TYPE (incremented), newexp);
9205 incremented = TREE_OPERAND (incremented, 0);
9208 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9209 return post ? op0 : temp;
9212 if (post)
9214 /* We have a true reference to the value in OP0.
9215 If there is an insn to add or subtract in this mode, queue it.
9216 Queueing the increment insn avoids the register shuffling
9217 that often results if we must increment now and first save
9218 the old value for subsequent use. */
9220 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9221 op0 = stabilize (op0);
9222 #endif
9224 icode = (int) this_optab->handlers[(int) mode].insn_code;
9225 if (icode != (int) CODE_FOR_nothing
9226 /* Make sure that OP0 is valid for operands 0 and 1
9227 of the insn we want to queue. */
9228 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9229 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9231 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9232 op1 = force_reg (mode, op1);
9234 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9236 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9238 rtx addr = (general_operand (XEXP (op0, 0), mode)
9239 ? force_reg (Pmode, XEXP (op0, 0))
9240 : copy_to_reg (XEXP (op0, 0)));
9241 rtx temp, result;
9243 op0 = change_address (op0, VOIDmode, addr);
9244 temp = force_reg (GET_MODE (op0), op0);
9245 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9246 op1 = force_reg (mode, op1);
9248 /* The increment queue is LIFO, thus we have to `queue'
9249 the instructions in reverse order. */
9250 enqueue_insn (op0, gen_move_insn (op0, temp));
9251 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9252 return result;
9256 /* Preincrement, or we can't increment with one simple insn. */
9257 if (post)
9258 /* Save a copy of the value before inc or dec, to return it later. */
9259 temp = value = copy_to_reg (op0);
9260 else
9261 /* Arrange to return the incremented value. */
9262 /* Copy the rtx because expand_binop will protect from the queue,
9263 and the results of that would be invalid for us to return
9264 if our caller does emit_queue before using our result. */
9265 temp = copy_rtx (value = op0);
9267 /* Increment however we can. */
9268 op1 = expand_binop (mode, this_optab, value, op1,
9269 current_function_check_memory_usage ? NULL_RTX : op0,
9270 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9271 /* Make sure the value is stored into OP0. */
9272 if (op1 != op0)
9273 emit_move_insn (op0, op1);
9275 return temp;
9278 /* At the start of a function, record that we have no previously-pushed
9279 arguments waiting to be popped. */
9281 void
9282 init_pending_stack_adjust ()
9284 pending_stack_adjust = 0;
9287 /* When exiting from function, if safe, clear out any pending stack adjust
9288 so the adjustment won't get done.
9290 Note, if the current function calls alloca, then it must have a
9291 frame pointer regardless of the value of flag_omit_frame_pointer. */
9293 void
9294 clear_pending_stack_adjust ()
9296 #ifdef EXIT_IGNORE_STACK
9297 if (optimize > 0
9298 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9299 && EXIT_IGNORE_STACK
9300 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9301 && ! flag_inline_functions)
9303 stack_pointer_delta -= pending_stack_adjust,
9304 pending_stack_adjust = 0;
9306 #endif
9309 /* Pop any previously-pushed arguments that have not been popped yet. */
9311 void
9312 do_pending_stack_adjust ()
9314 if (inhibit_defer_pop == 0)
9316 if (pending_stack_adjust != 0)
9317 adjust_stack (GEN_INT (pending_stack_adjust));
9318 pending_stack_adjust = 0;
9322 /* Expand conditional expressions. */
9324 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9325 LABEL is an rtx of code CODE_LABEL, in this function and all the
9326 functions here. */
9328 void
9329 jumpifnot (exp, label)
9330 tree exp;
9331 rtx label;
9333 do_jump (exp, label, NULL_RTX);
9336 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9338 void
9339 jumpif (exp, label)
9340 tree exp;
9341 rtx label;
9343 do_jump (exp, NULL_RTX, label);
9346 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9347 the result is zero, or IF_TRUE_LABEL if the result is one.
9348 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9349 meaning fall through in that case.
9351 do_jump always does any pending stack adjust except when it does not
9352 actually perform a jump. An example where there is no jump
9353 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9355 This function is responsible for optimizing cases such as
9356 &&, || and comparison operators in EXP. */
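/* For illustration: a condition such as `if (a && b) f ();' reaches the
   TRUTH_ANDIF_EXPR case below and expands roughly to

       test a;  if zero, jump to if_false_label (or a drop-through label)
       test b;  if zero, jump to if_false_label, else to if_true_label

   so no boolean value is ever materialized in a register.  */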
9358 void
9359 do_jump (exp, if_false_label, if_true_label)
9360 tree exp;
9361 rtx if_false_label, if_true_label;
9363 register enum tree_code code = TREE_CODE (exp);
9364 /* Some cases need to create a label to jump to
9365 in order to properly fall through.
9366 These cases set DROP_THROUGH_LABEL nonzero. */
9367 rtx drop_through_label = 0;
9368 rtx temp;
9369 int i;
9370 tree type;
9371 enum machine_mode mode;
9373 #ifdef MAX_INTEGER_COMPUTATION_MODE
9374 check_max_integer_computation_mode (exp);
9375 #endif
9377 emit_queue ();
9379 switch (code)
9381 case ERROR_MARK:
9382 break;
9384 case INTEGER_CST:
9385 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9386 if (temp)
9387 emit_jump (temp);
9388 break;
9390 #if 0
9391 /* This is not true with #pragma weak */
9392 case ADDR_EXPR:
9393 /* The address of something can never be zero. */
9394 if (if_true_label)
9395 emit_jump (if_true_label);
9396 break;
9397 #endif
9399 case NOP_EXPR:
9400 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9401 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9402 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9403 goto normal;
9404 case CONVERT_EXPR:
9405 /* If we are narrowing the operand, we have to do the compare in the
9406 narrower mode. */
9407 if ((TYPE_PRECISION (TREE_TYPE (exp))
9408 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9409 goto normal;
9410 case NON_LVALUE_EXPR:
9411 case REFERENCE_EXPR:
9412 case ABS_EXPR:
9413 case NEGATE_EXPR:
9414 case LROTATE_EXPR:
9415 case RROTATE_EXPR:
9416 /* These cannot change zero->non-zero or vice versa. */
9417 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9418 break;
9420 case WITH_RECORD_EXPR:
9421 /* Put the object on the placeholder list, recurse through our first
9422 operand, and pop the list. */
9423 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9424 placeholder_list);
9425 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9426 placeholder_list = TREE_CHAIN (placeholder_list);
9427 break;
9429 #if 0
9430 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9431 a test and can be longer if the test is eliminated. */
9432 case PLUS_EXPR:
9433 /* Reduce to minus. */
9434 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9435 TREE_OPERAND (exp, 0),
9436 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9437 TREE_OPERAND (exp, 1))));
9438 /* Process as MINUS. */
9439 #endif
9441 case MINUS_EXPR:
9442 /* Non-zero iff operands of minus differ. */
9443 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9444 TREE_OPERAND (exp, 0),
9445 TREE_OPERAND (exp, 1)),
9446 NE, NE, if_false_label, if_true_label);
9447 break;
9449 case BIT_AND_EXPR:
9450 /* If we are AND'ing with a small constant, do this comparison in the
9451 smallest type that fits. If the machine doesn't have comparisons
9452 that small, it will be converted back to the wider comparison.
9453 This helps if we are testing the sign bit of a narrower object.
9454 combine can't do this for us because it can't know whether a
9455 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
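/* For illustration: in `if (flags & 0x80)' with `flags' of type int, the
   constant fits in 8 bits, so the whole test is converted to an unsigned
   char value and compared in QImode when the target has such a compare;
   this is the sign-bit-of-a-narrower-object case mentioned above.  */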
9457 if (! SLOW_BYTE_ACCESS
9458 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9459 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9460 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9461 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9462 && (type = type_for_mode (mode, 1)) != 0
9463 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9464 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9465 != CODE_FOR_nothing))
9467 do_jump (convert (type, exp), if_false_label, if_true_label);
9468 break;
9470 goto normal;
9472 case TRUTH_NOT_EXPR:
9473 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9474 break;
9476 case TRUTH_ANDIF_EXPR:
9477 if (if_false_label == 0)
9478 if_false_label = drop_through_label = gen_label_rtx ();
9479 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9480 start_cleanup_deferral ();
9481 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9482 end_cleanup_deferral ();
9483 break;
9485 case TRUTH_ORIF_EXPR:
9486 if (if_true_label == 0)
9487 if_true_label = drop_through_label = gen_label_rtx ();
9488 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9489 start_cleanup_deferral ();
9490 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9491 end_cleanup_deferral ();
9492 break;
9494 case COMPOUND_EXPR:
9495 push_temp_slots ();
9496 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9497 preserve_temp_slots (NULL_RTX);
9498 free_temp_slots ();
9499 pop_temp_slots ();
9500 emit_queue ();
9501 do_pending_stack_adjust ();
9502 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9503 break;
9505 case COMPONENT_REF:
9506 case BIT_FIELD_REF:
9507 case ARRAY_REF:
9509 HOST_WIDE_INT bitsize, bitpos;
9510 int unsignedp;
9511 enum machine_mode mode;
9512 tree type;
9513 tree offset;
9514 int volatilep = 0;
9515 unsigned int alignment;
9517 /* Get description of this reference. We don't actually care
9518 about the underlying object here. */
9519 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9520 &unsignedp, &volatilep, &alignment);
9522 type = type_for_size (bitsize, unsignedp);
9523 if (! SLOW_BYTE_ACCESS
9524 && type != 0 && bitsize >= 0
9525 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9526 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9527 != CODE_FOR_nothing))
9529 do_jump (convert (type, exp), if_false_label, if_true_label);
9530 break;
9532 goto normal;
9535 case COND_EXPR:
9536 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9537 if (integer_onep (TREE_OPERAND (exp, 1))
9538 && integer_zerop (TREE_OPERAND (exp, 2)))
9539 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9541 else if (integer_zerop (TREE_OPERAND (exp, 1))
9542 && integer_onep (TREE_OPERAND (exp, 2)))
9543 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9545 else
9547 register rtx label1 = gen_label_rtx ();
9548 drop_through_label = gen_label_rtx ();
9550 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9552 start_cleanup_deferral ();
9553 /* Now the THEN-expression. */
9554 do_jump (TREE_OPERAND (exp, 1),
9555 if_false_label ? if_false_label : drop_through_label,
9556 if_true_label ? if_true_label : drop_through_label);
9557 /* In case the do_jump just above never jumps. */
9558 do_pending_stack_adjust ();
9559 emit_label (label1);
9561 /* Now the ELSE-expression. */
9562 do_jump (TREE_OPERAND (exp, 2),
9563 if_false_label ? if_false_label : drop_through_label,
9564 if_true_label ? if_true_label : drop_through_label);
9565 end_cleanup_deferral ();
9567 break;
9569 case EQ_EXPR:
9571 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9573 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9574 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9576 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9577 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9578 do_jump
9579 (fold
9580 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9581 fold (build (EQ_EXPR, TREE_TYPE (exp),
9582 fold (build1 (REALPART_EXPR,
9583 TREE_TYPE (inner_type),
9584 exp0)),
9585 fold (build1 (REALPART_EXPR,
9586 TREE_TYPE (inner_type),
9587 exp1)))),
9588 fold (build (EQ_EXPR, TREE_TYPE (exp),
9589 fold (build1 (IMAGPART_EXPR,
9590 TREE_TYPE (inner_type),
9591 exp0)),
9592 fold (build1 (IMAGPART_EXPR,
9593 TREE_TYPE (inner_type),
9594 exp1)))))),
9595 if_false_label, if_true_label);
9598 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9599 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9601 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9602 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9603 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9604 else
9605 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9606 break;
9609 case NE_EXPR:
9611 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9613 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9614 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9616 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9617 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9618 do_jump
9619 (fold
9620 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9621 fold (build (NE_EXPR, TREE_TYPE (exp),
9622 fold (build1 (REALPART_EXPR,
9623 TREE_TYPE (inner_type),
9624 exp0)),
9625 fold (build1 (REALPART_EXPR,
9626 TREE_TYPE (inner_type),
9627 exp1)))),
9628 fold (build (NE_EXPR, TREE_TYPE (exp),
9629 fold (build1 (IMAGPART_EXPR,
9630 TREE_TYPE (inner_type),
9631 exp0)),
9632 fold (build1 (IMAGPART_EXPR,
9633 TREE_TYPE (inner_type),
9634 exp1)))))),
9635 if_false_label, if_true_label);
9638 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9639 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9641 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9642 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9643 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9644 else
9645 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9646 break;
9649 case LT_EXPR:
9650 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9651 if (GET_MODE_CLASS (mode) == MODE_INT
9652 && ! can_compare_p (LT, mode, ccp_jump))
9653 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9654 else
9655 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9656 break;
9658 case LE_EXPR:
9659 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9660 if (GET_MODE_CLASS (mode) == MODE_INT
9661 && ! can_compare_p (LE, mode, ccp_jump))
9662 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9663 else
9664 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9665 break;
9667 case GT_EXPR:
9668 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9669 if (GET_MODE_CLASS (mode) == MODE_INT
9670 && ! can_compare_p (GT, mode, ccp_jump))
9671 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9672 else
9673 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9674 break;
9676 case GE_EXPR:
9677 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9678 if (GET_MODE_CLASS (mode) == MODE_INT
9679 && ! can_compare_p (GE, mode, ccp_jump))
9680 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9681 else
9682 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9683 break;
9685 case UNORDERED_EXPR:
9686 case ORDERED_EXPR:
9688 enum rtx_code cmp, rcmp;
9689 int do_rev;
9691 if (code == UNORDERED_EXPR)
9692 cmp = UNORDERED, rcmp = ORDERED;
9693 else
9694 cmp = ORDERED, rcmp = UNORDERED;
9695 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9697 do_rev = 0;
9698 if (! can_compare_p (cmp, mode, ccp_jump)
9699 && (can_compare_p (rcmp, mode, ccp_jump)
9700 /* If the target doesn't provide either UNORDERED or ORDERED
9701 comparisons, canonicalize on UNORDERED for the library. */
9702 || rcmp == UNORDERED))
9703 do_rev = 1;
9705 if (! do_rev)
9706 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9707 else
9708 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9710 break;
9713 enum rtx_code rcode1;
9714 enum tree_code tcode2;
9716 case UNLT_EXPR:
9717 rcode1 = UNLT;
9718 tcode2 = LT_EXPR;
9719 goto unordered_bcc;
9720 case UNLE_EXPR:
9721 rcode1 = UNLE;
9722 tcode2 = LE_EXPR;
9723 goto unordered_bcc;
9724 case UNGT_EXPR:
9725 rcode1 = UNGT;
9726 tcode2 = GT_EXPR;
9727 goto unordered_bcc;
9728 case UNGE_EXPR:
9729 rcode1 = UNGE;
9730 tcode2 = GE_EXPR;
9731 goto unordered_bcc;
9732 case UNEQ_EXPR:
9733 rcode1 = UNEQ;
9734 tcode2 = EQ_EXPR;
9735 goto unordered_bcc;
9737 unordered_bcc:
9738 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9739 if (can_compare_p (rcode1, mode, ccp_jump))
9740 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9741 if_true_label);
9742 else
9744 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9745 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9746 tree cmp0, cmp1;
9748 /* If the target doesn't support combined unordered
9749 compares, decompose into UNORDERED + comparison. */
9750 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9751 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9752 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9753 do_jump (exp, if_false_label, if_true_label);
9756 break;
9758 default:
9759 normal:
9760 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9761 #if 0
9762 /* This is not needed any more and causes poor code since it causes
9763 comparisons and tests from non-SI objects to have different code
9764 sequences. */
9765 /* Copy to register to avoid generating bad insns by cse
9766 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9767 if (!cse_not_expected && GET_CODE (temp) == MEM)
9768 temp = copy_to_reg (temp);
9769 #endif
9770 do_pending_stack_adjust ();
9771 /* Do any postincrements in the expression that was tested. */
9772 emit_queue ();
9774 if (GET_CODE (temp) == CONST_INT
9775 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9776 || GET_CODE (temp) == LABEL_REF)
9778 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9779 if (target)
9780 emit_jump (target);
9782 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9783 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9784 /* Note swapping the labels gives us not-equal. */
9785 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9786 else if (GET_MODE (temp) != VOIDmode)
9787 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9788 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9789 GET_MODE (temp), NULL_RTX, 0,
9790 if_false_label, if_true_label);
9791 else
9792 abort ();
9795 if (drop_through_label)
9797 /* If do_jump produces code that might be jumped around,
9798 do any stack adjusts from that code, before the place
9799 where control merges in. */
9800 do_pending_stack_adjust ();
9801 emit_label (drop_through_label);
9805 /* Given a comparison expression EXP for values too wide to be compared
9806 with one insn, test the comparison and jump to the appropriate label.
9807 The code of EXP is ignored; we always test GT if SWAP is 0,
9808 and LT if SWAP is 1. */
9810 static void
9811 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9812 tree exp;
9813 int swap;
9814 rtx if_false_label, if_true_label;
9816 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9817 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9818 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9819 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9821 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9824 /* Compare OP0 with OP1, word at a time, in mode MODE.
9825 UNSIGNEDP says to do unsigned comparison.
9826 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
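/* For illustration, on a 32-bit target a 64-bit `a > b' expands roughly to

       if (hi (a) > hi (b))  goto if_true_label;    -- signedness of the original test
       if (hi (a) != hi (b)) goto if_false_label;   -- high words differ, so a < b
       if (lo (a) > lo (b))  goto if_true_label;    -- always unsigned
       goto if_false_label;

   Only the most significant word is compared with the original signedness;
   all lower words are compared unsigned.  */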
9828 void
9829 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9830 enum machine_mode mode;
9831 int unsignedp;
9832 rtx op0, op1;
9833 rtx if_false_label, if_true_label;
9835 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9836 rtx drop_through_label = 0;
9837 int i;
9839 if (! if_true_label || ! if_false_label)
9840 drop_through_label = gen_label_rtx ();
9841 if (! if_true_label)
9842 if_true_label = drop_through_label;
9843 if (! if_false_label)
9844 if_false_label = drop_through_label;
9846 /* Compare a word at a time, high order first. */
9847 for (i = 0; i < nwords; i++)
9849 rtx op0_word, op1_word;
9851 if (WORDS_BIG_ENDIAN)
9853 op0_word = operand_subword_force (op0, i, mode);
9854 op1_word = operand_subword_force (op1, i, mode);
9856 else
9858 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9859 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9862 /* All but the high-order word must be compared as unsigned. */
9863 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9864 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9865 NULL_RTX, if_true_label);
9867 /* Consider lower words only if these are equal. */
9868 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9869 NULL_RTX, 0, NULL_RTX, if_false_label);
9872 if (if_false_label)
9873 emit_jump (if_false_label);
9874 if (drop_through_label)
9875 emit_label (drop_through_label);
9878 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9879 with one insn, test the comparison and jump to the appropriate label. */
9881 static void
9882 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9883 tree exp;
9884 rtx if_false_label, if_true_label;
9886 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9887 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9888 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9889 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9890 int i;
9891 rtx drop_through_label = 0;
9893 if (! if_false_label)
9894 drop_through_label = if_false_label = gen_label_rtx ();
9896 for (i = 0; i < nwords; i++)
9897 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9898 operand_subword_force (op1, i, mode),
9899 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9900 word_mode, NULL_RTX, 0, if_false_label,
9901 NULL_RTX);
9903 if (if_true_label)
9904 emit_jump (if_true_label);
9905 if (drop_through_label)
9906 emit_label (drop_through_label);
9909 /* Jump according to whether OP0 is 0.
9910 We assume that OP0 has an integer mode that is too wide
9911 for the available compare insns. */
9913 void
9914 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9915 rtx op0;
9916 rtx if_false_label, if_true_label;
9918 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9919 rtx part;
9920 int i;
9921 rtx drop_through_label = 0;
9923 /* The fastest way of doing this comparison on almost any machine is to
9924 "or" all the words and compare the result. If all have to be loaded
9925 from memory and this is a very wide item, it's possible this may
9926 be slower, but that's highly unlikely. */
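/* For illustration, for a DImode value on a 32-bit target this becomes roughly

       part = lo (op0) | hi (op0);
       if (part == 0) goto if_true_label;  else goto if_false_label;

   The word-by-word sequence of compares further down is used only when the
   IOR cannot be generated.  */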
9928 part = gen_reg_rtx (word_mode);
9929 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9930 for (i = 1; i < nwords && part != 0; i++)
9931 part = expand_binop (word_mode, ior_optab, part,
9932 operand_subword_force (op0, i, GET_MODE (op0)),
9933 part, 1, OPTAB_WIDEN);
9935 if (part != 0)
9937 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9938 NULL_RTX, 0, if_false_label, if_true_label);
9940 return;
9943 /* If we couldn't do the "or" simply, do this with a series of compares. */
9944 if (! if_false_label)
9945 drop_through_label = if_false_label = gen_label_rtx ();
9947 for (i = 0; i < nwords; i++)
9948 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9949 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9950 if_false_label, NULL_RTX);
9952 if (if_true_label)
9953 emit_jump (if_true_label);
9955 if (drop_through_label)
9956 emit_label (drop_through_label);
9959 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9960 (including code to compute the values to be compared),
9961 and set (CC0) according to the result.
9962 The decision as to signed or unsigned comparison must be made by the caller.
9964 We force a stack adjustment unless there are currently
9965 things pushed on the stack that aren't yet used.
9967 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9968 compared.
9970 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9971 size of MODE should be used. */
9974 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9975 register rtx op0, op1;
9976 enum rtx_code code;
9977 int unsignedp;
9978 enum machine_mode mode;
9979 rtx size;
9980 unsigned int align;
9982 rtx tem;
9984 /* If one operand is constant, make it the second one. Only do this
9985 if the other operand is not constant as well. */
9987 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9988 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9990 tem = op0;
9991 op0 = op1;
9992 op1 = tem;
9993 code = swap_condition (code);
9996 if (flag_force_mem)
9998 op0 = force_not_mem (op0);
9999 op1 = force_not_mem (op1);
10002 do_pending_stack_adjust ();
10004 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10005 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10006 return tem;
10008 #if 0
10009 /* There's no need to do this now that combine.c can eliminate lots of
10010 sign extensions. This can be less efficient in certain cases on other
10011 machines. */
10013 /* If this is a signed equality comparison, we can do it as an
10014 unsigned comparison since zero-extension is cheaper than sign
10015 extension and comparisons with zero are done as unsigned. This is
10016 the case even on machines that can do fast sign extension, since
10017 zero-extension is easier to combine with other operations than
10018 sign-extension is. If we are comparing against a constant, we must
10019 convert it to what it would look like unsigned. */
10020 if ((code == EQ || code == NE) && ! unsignedp
10021 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10023 if (GET_CODE (op1) == CONST_INT
10024 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10025 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10026 unsignedp = 1;
10028 #endif
10030 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10032 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10035 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10036 The decision as to signed or unsigned comparison must be made by the caller.
10038 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10039 compared.
10041 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10042 size of MODE should be used. */
10044 void
10045 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10046 if_false_label, if_true_label)
10047 register rtx op0, op1;
10048 enum rtx_code code;
10049 int unsignedp;
10050 enum machine_mode mode;
10051 rtx size;
10052 unsigned int align;
10053 rtx if_false_label, if_true_label;
10055 rtx tem;
10056 int dummy_true_label = 0;
10058 /* Reverse the comparison if that is safe and we want to jump if it is
10059 false. */
10060 if (! if_true_label && ! FLOAT_MODE_P (mode))
10062 if_true_label = if_false_label;
10063 if_false_label = 0;
10064 code = reverse_condition (code);
10067 /* If one operand is constant, make it the second one. Only do this
10068 if the other operand is not constant as well. */
10070 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10071 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10073 tem = op0;
10074 op0 = op1;
10075 op1 = tem;
10076 code = swap_condition (code);
10079 if (flag_force_mem)
10081 op0 = force_not_mem (op0);
10082 op1 = force_not_mem (op1);
10085 do_pending_stack_adjust ();
10087 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10088 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10090 if (tem == const_true_rtx)
10092 if (if_true_label)
10093 emit_jump (if_true_label);
10095 else
10097 if (if_false_label)
10098 emit_jump (if_false_label);
10100 return;
10103 #if 0
10104 /* There's no need to do this now that combine.c can eliminate lots of
10105 sign extensions. This can be less efficient in certain cases on other
10106 machines. */
10108 /* If this is a signed equality comparison, we can do it as an
10109 unsigned comparison since zero-extension is cheaper than sign
10110 extension and comparisons with zero are done as unsigned. This is
10111 the case even on machines that can do fast sign extension, since
10112 zero-extension is easier to combine with other operations than
10113 sign-extension is. If we are comparing against a constant, we must
10114 convert it to what it would look like unsigned. */
10115 if ((code == EQ || code == NE) && ! unsignedp
10116 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10118 if (GET_CODE (op1) == CONST_INT
10119 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10120 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10121 unsignedp = 1;
10123 #endif
10125 if (! if_true_label)
10127 dummy_true_label = 1;
10128 if_true_label = gen_label_rtx ();
10131 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10132 if_true_label);
10134 if (if_false_label)
10135 emit_jump (if_false_label);
10136 if (dummy_true_label)
10137 emit_label (if_true_label);
10140 /* Generate code for a comparison expression EXP (including code to compute
10141 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10142 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10143 generated code will drop through.
10144 SIGNED_CODE should be the rtx operation for this comparison for
10145 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10147 We force a stack adjustment unless there are currently
10148 things pushed on the stack that aren't yet used. */
10150 static void
10151 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10152 if_true_label)
10153 register tree exp;
10154 enum rtx_code signed_code, unsigned_code;
10155 rtx if_false_label, if_true_label;
10157 unsigned int align0, align1;
10158 register rtx op0, op1;
10159 register tree type;
10160 register enum machine_mode mode;
10161 int unsignedp;
10162 enum rtx_code code;
10164 /* Don't crash if the comparison was erroneous. */
10165 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10167 return;
10169 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10170 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10171 return;
10173 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10174 mode = TYPE_MODE (type);
10175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10176 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10177 || (GET_MODE_BITSIZE (mode)
10178 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10179 1)))))))
10181 /* op0 might have been replaced by promoted constant, in which
10182 case the type of second argument should be used. */
10183 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10184 mode = TYPE_MODE (type);
10186 unsignedp = TREE_UNSIGNED (type);
10187 code = unsignedp ? unsigned_code : signed_code;
10189 #ifdef HAVE_canonicalize_funcptr_for_compare
10190 /* If function pointers need to be "canonicalized" before they can
10191 be reliably compared, then canonicalize them. */
10192 if (HAVE_canonicalize_funcptr_for_compare
10193 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10194 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10195 == FUNCTION_TYPE))
10197 rtx new_op0 = gen_reg_rtx (mode);
10199 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10200 op0 = new_op0;
10203 if (HAVE_canonicalize_funcptr_for_compare
10204 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10205 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10206 == FUNCTION_TYPE))
10208 rtx new_op1 = gen_reg_rtx (mode);
10210 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10211 op1 = new_op1;
10213 #endif
10215 /* Do any postincrements in the expression that was tested. */
10216 emit_queue ();
10218 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10219 ((mode == BLKmode)
10220 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10221 MIN (align0, align1),
10222 if_false_label, if_true_label);
10225 /* Generate code to calculate EXP using a store-flag instruction
10226 and return an rtx for the result. EXP is either a comparison
10227 or a TRUTH_NOT_EXPR whose operand is a comparison.
10229 If TARGET is nonzero, store the result there if convenient.
10231 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10232 cheap.
10234 Return zero if there is no suitable set-flag instruction
10235 available on this machine.
10237 Once expand_expr has been called on the arguments of the comparison,
10238 we are committed to doing the store flag, since it is not safe to
10239 re-evaluate the expression. We emit the store-flag insn by calling
10240 emit_store_flag, but only expand the arguments if we have a reason
10241 to believe that emit_store_flag will be successful. If we think that
10242 it will, but it isn't, we have to simulate the store-flag with a
10243 set/jump/set sequence. */
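/* For illustration: for `r = (a < b);' the goal is a single store-flag
   sequence such as

       compare a, b
       set r from the condition          -- an scc-style instruction

   rather than a compare, a conditional branch, and two moves of 0 and 1;
   the set/compare/jump/set code at the end of this function is the
   fallback used when emit_store_flag cannot do it directly.  */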
10245 static rtx
10246 do_store_flag (exp, target, mode, only_cheap)
10247 tree exp;
10248 rtx target;
10249 enum machine_mode mode;
10250 int only_cheap;
10252 enum rtx_code code;
10253 tree arg0, arg1, type;
10254 tree tem;
10255 enum machine_mode operand_mode;
10256 int invert = 0;
10257 int unsignedp;
10258 rtx op0, op1;
10259 enum insn_code icode;
10260 rtx subtarget = target;
10261 rtx result, label;
10263 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10264 result at the end. We can't simply invert the test since it would
10265 have already been inverted if it were valid. This case occurs for
10266 some floating-point comparisons. */
10268 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10269 invert = 1, exp = TREE_OPERAND (exp, 0);
10271 arg0 = TREE_OPERAND (exp, 0);
10272 arg1 = TREE_OPERAND (exp, 1);
10274 /* Don't crash if the comparison was erroneous. */
10275 if (arg0 == error_mark_node || arg1 == error_mark_node)
10276 return const0_rtx;
10278 type = TREE_TYPE (arg0);
10279 operand_mode = TYPE_MODE (type);
10280 unsignedp = TREE_UNSIGNED (type);
10282 /* We won't bother with BLKmode store-flag operations because it would mean
10283 passing a lot of information to emit_store_flag. */
10284 if (operand_mode == BLKmode)
10285 return 0;
10287 /* We won't bother with store-flag operations involving function pointers
10288 when function pointers must be canonicalized before comparisons. */
10289 #ifdef HAVE_canonicalize_funcptr_for_compare
10290 if (HAVE_canonicalize_funcptr_for_compare
10291 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10292 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10293 == FUNCTION_TYPE))
10294 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10295 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10296 == FUNCTION_TYPE))))
10297 return 0;
10298 #endif
10300 STRIP_NOPS (arg0);
10301 STRIP_NOPS (arg1);
10303 /* Get the rtx comparison code to use. We know that EXP is a comparison
10304 operation of some type. Some comparisons against 1 and -1 can be
10305 converted to comparisons with zero. Do so here so that the tests
10306 below will be aware that we have a comparison with zero. These
10307 tests will not catch constants in the first operand, but constants
10308 are rarely passed as the first operand. */
10310 switch (TREE_CODE (exp))
10312 case EQ_EXPR:
10313 code = EQ;
10314 break;
10315 case NE_EXPR:
10316 code = NE;
10317 break;
10318 case LT_EXPR:
10319 if (integer_onep (arg1))
10320 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10321 else
10322 code = unsignedp ? LTU : LT;
10323 break;
10324 case LE_EXPR:
10325 if (! unsignedp && integer_all_onesp (arg1))
10326 arg1 = integer_zero_node, code = LT;
10327 else
10328 code = unsignedp ? LEU : LE;
10329 break;
10330 case GT_EXPR:
10331 if (! unsignedp && integer_all_onesp (arg1))
10332 arg1 = integer_zero_node, code = GE;
10333 else
10334 code = unsignedp ? GTU : GT;
10335 break;
10336 case GE_EXPR:
10337 if (integer_onep (arg1))
10338 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10339 else
10340 code = unsignedp ? GEU : GE;
10341 break;
10343 case UNORDERED_EXPR:
10344 code = UNORDERED;
10345 break;
10346 case ORDERED_EXPR:
10347 code = ORDERED;
10348 break;
10349 case UNLT_EXPR:
10350 code = UNLT;
10351 break;
10352 case UNLE_EXPR:
10353 code = UNLE;
10354 break;
10355 case UNGT_EXPR:
10356 code = UNGT;
10357 break;
10358 case UNGE_EXPR:
10359 code = UNGE;
10360 break;
10361 case UNEQ_EXPR:
10362 code = UNEQ;
10363 break;
10365 default:
10366 abort ();
10369 /* Put a constant second. */
10370 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10372 tem = arg0; arg0 = arg1; arg1 = tem;
10373 code = swap_condition (code);
10376 /* If this is an equality or inequality test of a single bit, we can
10377 do this by shifting the bit being tested to the low-order bit and
10378 masking the result with the constant 1. If the condition was EQ,
10379 we xor it with 1. This does not require an scc insn and is faster
10380 than an scc insn even if we have it. */
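/* For illustration: `r = ((x & 0x20) != 0);' is compiled below as

       r = (x >> 5) & 1;

   and the EQ form `((x & 0x20) == 0)' additionally XORs the result with 1,
   so neither a branch nor an scc instruction is needed.  */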
10382 if ((code == NE || code == EQ)
10383 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10384 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10386 tree inner = TREE_OPERAND (arg0, 0);
10387 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10388 int ops_unsignedp;
10390 /* If INNER is a right shift of a constant and it plus BITNUM does
10391 not overflow, adjust BITNUM and INNER. */
10393 if (TREE_CODE (inner) == RSHIFT_EXPR
10394 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10395 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10396 && bitnum < TYPE_PRECISION (type)
10397 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10398 bitnum - TYPE_PRECISION (type)))
10400 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10401 inner = TREE_OPERAND (inner, 0);
10404 /* If we are going to be able to omit the AND below, we must do our
10405 operations as unsigned. If we must use the AND, we have a choice.
10406 Normally unsigned is faster, but for some machines signed is. */
10407 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10408 #ifdef LOAD_EXTEND_OP
10409 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10410 #else
10412 #endif
10415 if (! get_subtarget (subtarget)
10416 || GET_MODE (subtarget) != operand_mode
10417 || ! safe_from_p (subtarget, inner, 1))
10418 subtarget = 0;
10420 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10422 if (bitnum != 0)
10423 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10424 size_int (bitnum), subtarget, ops_unsignedp);
10426 if (GET_MODE (op0) != mode)
10427 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10429 if ((code == EQ && ! invert) || (code == NE && invert))
10430 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10431 ops_unsignedp, OPTAB_LIB_WIDEN);
10433 /* Put the AND last so it can combine with more things. */
10434 if (bitnum != TYPE_PRECISION (type) - 1)
10435 op0 = expand_and (op0, const1_rtx, subtarget);
10437 return op0;
10440 /* Now see if we are likely to be able to do this. Return if not. */
10441 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10442 return 0;
10444 icode = setcc_gen_code[(int) code];
10445 if (icode == CODE_FOR_nothing
10446 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10448 /* We can only do this if it is one of the special cases that
10449 can be handled without an scc insn. */
10450 if ((code == LT && integer_zerop (arg1))
10451 || (! only_cheap && code == GE && integer_zerop (arg1)))
10453 else if (BRANCH_COST >= 0
10454 && ! only_cheap && (code == NE || code == EQ)
10455 && TREE_CODE (type) != REAL_TYPE
10456 && ((abs_optab->handlers[(int) operand_mode].insn_code
10457 != CODE_FOR_nothing)
10458 || (ffs_optab->handlers[(int) operand_mode].insn_code
10459 != CODE_FOR_nothing)))
10461 else
10462 return 0;
10465 if (! get_subtarget (target)
10466 || GET_MODE (subtarget) != operand_mode
10467 || ! safe_from_p (subtarget, arg1, 1))
10468 subtarget = 0;
10470 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10471 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10473 if (target == 0)
10474 target = gen_reg_rtx (mode);
10476 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10477 because, if the emit_store_flag does anything it will succeed and
10478 OP0 and OP1 will not be used subsequently. */
10480 result = emit_store_flag (target, code,
10481 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10482 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10483 operand_mode, unsignedp, 1);
10485 if (result)
10487 if (invert)
10488 result = expand_binop (mode, xor_optab, result, const1_rtx,
10489 result, 0, OPTAB_LIB_WIDEN);
10490 return result;
10493 /* If this failed, we have to do this with set/compare/jump/set code. */
10494 if (GET_CODE (target) != REG
10495 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10496 target = gen_reg_rtx (GET_MODE (target));
10498 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10499 result = compare_from_rtx (op0, op1, code, unsignedp,
10500 operand_mode, NULL_RTX, 0);
10501 if (GET_CODE (result) == CONST_INT)
10502 return (((result == const0_rtx && ! invert)
10503 || (result != const0_rtx && invert))
10504 ? const0_rtx : const1_rtx);
10506 label = gen_label_rtx ();
10507 if (bcc_gen_fctn[(int) code] == 0)
10508 abort ();
10510 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10511 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10512 emit_label (label);
10514 return target;
10517 /* Generate a tablejump instruction (used for switch statements). */
10519 #ifdef HAVE_tablejump
10521 /* INDEX is the value being switched on, with the lowest value
10522 in the table already subtracted.
10523 MODE is its expected mode (needed if INDEX is constant).
10524 RANGE is the length of the jump table.
10525 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10527 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10528 index value is out of range. */
10530 void
10531 do_tablejump (index, mode, range, table_label, default_label)
10532 rtx index, range, table_label, default_label;
10533 enum machine_mode mode;
10535 register rtx temp, vector;
10537 /* Do an unsigned comparison (in the proper mode) between the index
10538 expression and the value which represents the length of the range.
10539 Since we just finished subtracting the lower bound of the range
10540 from the index expression, this comparison allows us to simultaneously
10541 check that the original index expression value is both greater than
10542 or equal to the minimum value of the range and less than or equal to
10543 the maximum value of the range. */
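/* For illustration: for `switch (i)' with cases 3 through 10 the caller has
   already computed index = i - 3 and range = 7, so the single unsigned test

       if ((unsigned) (i - 3) > 7) goto default_label;

   rejects both i < 3 (which wraps around to a large unsigned value) and
   i > 10 in one comparison.  */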
10545 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10546 0, default_label);
10548 /* If index is in range, it must fit in Pmode.
10549 Convert to Pmode so we can index with it. */
10550 if (mode != Pmode)
10551 index = convert_to_mode (Pmode, index, 1);
10553 /* Don't let a MEM slip thru, because then INDEX that comes
10554 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10555 and break_out_memory_refs will go to work on it and mess it up. */
10556 #ifdef PIC_CASE_VECTOR_ADDRESS
10557 if (flag_pic && GET_CODE (index) != REG)
10558 index = copy_to_mode_reg (Pmode, index);
10559 #endif
10561 /* If flag_force_addr were to affect this address
10562 it could interfere with the tricky assumptions made
10563 about addresses that contain label-refs,
10564 which may be valid only very near the tablejump itself. */
10565 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10566 GET_MODE_SIZE, because this indicates how large insns are. The other
10567 uses should all be Pmode, because they are addresses. This code
10568 could fail if addresses and insns are not the same size. */
10569 index = gen_rtx_PLUS (Pmode,
10570 gen_rtx_MULT (Pmode, index,
10571 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10572 gen_rtx_LABEL_REF (Pmode, table_label));
10573 #ifdef PIC_CASE_VECTOR_ADDRESS
10574 if (flag_pic)
10575 index = PIC_CASE_VECTOR_ADDRESS (index);
10576 else
10577 #endif
10578 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10579 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10580 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10581 RTX_UNCHANGING_P (vector) = 1;
10582 convert_move (temp, vector, 0);
10584 emit_jump_insn (gen_tablejump (temp, table_label));
10586 /* If we are generating PIC code or if the table is PC-relative, the
10587 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10588 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10589 emit_barrier ();
10592 #endif /* HAVE_tablejump */