gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "tm_p.h"
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
53 #ifdef PUSH_ROUNDING
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
57 #endif
59 #endif
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Don't check memory usage, since code is being emitted to check a memory
88 usage. Used when current_function_check_memory_usage is true, to avoid
89 infinite recursion. */
90 static int in_check_memory_usage;
92 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
93 static tree placeholder_list = 0;
95 /* This structure is used by move_by_pieces to describe the move to
96 be performed. */
97 struct move_by_pieces
99 rtx to;
100 rtx to_addr;
101 int autinc_to;
102 int explicit_inc_to;
103 int to_struct;
104 int to_readonly;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 int from_struct;
110 int from_readonly;
111 int len;
112 int offset;
113 int reverse;
116 /* This structure is used by clear_by_pieces to describe the clear to
117 be performed. */
119 struct clear_by_pieces
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 int to_struct;
126 int len;
127 int offset;
128 int reverse;
131 extern struct obstack permanent_obstack;
133 static rtx get_push_address PARAMS ((int));
135 static rtx enqueue_insn PARAMS ((rtx, rtx));
136 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
140 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
141 enum machine_mode,
142 struct clear_by_pieces *));
143 static int is_zeros_p PARAMS ((tree));
144 static int mostly_zeros_p PARAMS ((tree));
145 static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
146 tree, tree, unsigned int, int));
147 static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
148 static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
149 tree, enum machine_mode, int,
150 unsigned int, int, int));
151 static enum memory_use_mode
152 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
153 static tree save_noncopied_parts PARAMS ((tree, tree));
154 static tree init_noncopied_parts PARAMS ((tree, tree));
155 static int safe_from_p PARAMS ((rtx, tree, int));
156 static int fixed_type_p PARAMS ((tree));
157 static rtx var_rtx PARAMS ((tree));
158 static int readonly_fields_p PARAMS ((tree));
159 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
160 static rtx expand_increment PARAMS ((tree, int, int));
161 static void preexpand_calls PARAMS ((tree));
162 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
163 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
164 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx));
165 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
167 /* Record for each mode whether we can move a register directly to or
168 from an object of that mode in memory. If we can't, we won't try
169 to use that mode directly when accessing a field of that mode. */
171 static char direct_load[NUM_MACHINE_MODES];
172 static char direct_store[NUM_MACHINE_MODES];
174 /* If a memory-to-memory move would take MOVE_RATIO or more simple
175 move-instruction sequences, we will do a movstr or libcall instead. */
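/* For example, with the non-movstr default below (15 when not optimizing
   for size), a copy that would need 15 or more single-register moves is
   handled by a movstr pattern or a library call rather than expanded
   into individual move insns.  */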
177 #ifndef MOVE_RATIO
178 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
179 #define MOVE_RATIO 2
180 #else
181 /* If we are optimizing for space (-Os), cut down the default move ratio */
182 #define MOVE_RATIO (optimize_size ? 3 : 15)
183 #endif
184 #endif
186 /* This macro is used to determine whether move_by_pieces should be called
187 to perform a structure copy. */
188 #ifndef MOVE_BY_PIECES_P
189 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
190 (SIZE, ALIGN) < MOVE_RATIO)
191 #endif
193 /* This array records the insn_code of insns to perform block moves. */
194 enum insn_code movstr_optab[NUM_MACHINE_MODES];
196 /* This array records the insn_code of insns to perform block clears. */
197 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
199 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
201 #ifndef SLOW_UNALIGNED_ACCESS
202 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
203 #endif
205 /* This is run once per compilation to set up which modes can be used
206 directly in memory and to initialize the block move optab. */
208 void
209 init_expr_once ()
211 rtx insn, pat;
212 enum machine_mode mode;
213 int num_clobbers;
214 rtx mem, mem1;
215 char *free_point;
217 start_sequence ();
219 /* Since we are on the permanent obstack, we must be sure we save this
220 spot AFTER we call start_sequence, since it will reuse the rtl it
221 makes. */
222 free_point = (char *) oballoc (0);
224 /* Try indexing by frame ptr and try by stack ptr.
225 It is known that on the Convex the stack ptr isn't a valid index.
226 With luck, one or the other is valid on any machine. */
227 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
228 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
231 pat = PATTERN (insn);
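  /* The dummy SET emitted above has its operands overwritten in the loop
     below, so recog can be asked about each (mode, hard register)
     combination without generating fresh rtl each time.  */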
233 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
234 mode = (enum machine_mode) ((int) mode + 1))
236 int regno;
237 rtx reg;
239 direct_load[(int) mode] = direct_store[(int) mode] = 0;
240 PUT_MODE (mem, mode);
241 PUT_MODE (mem1, mode);
243 /* See if there is some register that can be used in this mode and
244 directly loaded or stored from memory. */
246 if (mode != VOIDmode && mode != BLKmode)
247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
248 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
249 regno++)
251 if (! HARD_REGNO_MODE_OK (regno, mode))
252 continue;
254 reg = gen_rtx_REG (mode, regno);
256 SET_SRC (pat) = mem;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = mem1;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem1;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
278 end_sequence ();
279 obfree (free_point);
282 /* This is run at the start of compiling a function. */
284 void
285 init_expr ()
287 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
289 pending_chain = 0;
290 pending_stack_adjust = 0;
291 inhibit_defer_pop = 0;
292 saveregs_value = 0;
293 apply_args_value = 0;
294 forced_labels = 0;
297 void
298 mark_expr_status (p)
299 struct expr_status *p;
301 if (p == NULL)
302 return;
304 ggc_mark_rtx (p->x_saveregs_value);
305 ggc_mark_rtx (p->x_apply_args_value);
306 ggc_mark_rtx (p->x_forced_labels);
309 void
310 free_expr_status (f)
311 struct function *f;
313 free (f->expr);
314 f->expr = NULL;
317 /* Small sanity check that the queue is empty at the end of a function. */
318 void
319 finish_expr_for_function ()
321 if (pending_chain)
322 abort ();
325 /* Manage the queue of increment instructions to be output
326 for POSTINCREMENT_EXPR expressions, etc. */
328 /* Queue up to increment (or change) VAR later. BODY says how:
329 BODY should be the same thing you would pass to emit_insn
330 to increment right away. It will go to emit_insn later on.
332 The value is a QUEUED expression to be used in place of VAR
333 where you want to guarantee the pre-incrementation value of VAR. */
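/* For example, when a POSTINCREMENT_EXPR such as "a[i++]" is expanded,
   the increment of I is queued here and the returned QUEUED rtx stands
   in for I, so the index sees I's value from before the increment.  */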
335 static rtx
336 enqueue_insn (var, body)
337 rtx var, body;
339 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
340 body, pending_chain);
341 return pending_chain;
344 /* Use protect_from_queue to convert a QUEUED expression
345 into something that you can put immediately into an instruction.
346 If the queued incrementation has not happened yet,
347 protect_from_queue returns the variable itself.
348 If the incrementation has happened, protect_from_queue returns a temp
349 that contains a copy of the old value of the variable.
351 Any time an rtx which might possibly be a QUEUED is to be put
352 into an instruction, it must be passed through protect_from_queue first.
353 QUEUED expressions are not meaningful in instructions.
355 Do not pass a value through protect_from_queue and then hold
356 on to it for a while before putting it in an instruction!
357 If the queue is flushed in between, incorrect code will result. */
359 rtx
360 protect_from_queue (x, modify)
361 register rtx x;
362 int modify;
364 register RTX_CODE code = GET_CODE (x);
366 #if 0 /* A QUEUED can hang around after the queue is forced out. */
367 /* Shortcut for most common case. */
368 if (pending_chain == 0)
369 return x;
370 #endif
372 if (code != QUEUED)
374 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
375 use of autoincrement. Make a copy of the contents of the memory
376 location rather than a copy of the address, but not if the value is
377 of mode BLKmode. Don't modify X in place since it might be
378 shared. */
379 if (code == MEM && GET_MODE (x) != BLKmode
380 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
382 register rtx y = XEXP (x, 0);
383 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
385 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
386 MEM_COPY_ATTRIBUTES (new, x);
387 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
389 if (QUEUED_INSN (y))
391 register rtx temp = gen_reg_rtx (GET_MODE (new));
392 emit_insn_before (gen_move_insn (temp, new),
393 QUEUED_INSN (y));
394 return temp;
396 return new;
398 /* Otherwise, recursively protect the subexpressions of all
399 the kinds of rtx's that can contain a QUEUED. */
400 if (code == MEM)
402 rtx tem = protect_from_queue (XEXP (x, 0), 0);
403 if (tem != XEXP (x, 0))
405 x = copy_rtx (x);
406 XEXP (x, 0) = tem;
409 else if (code == PLUS || code == MULT)
411 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
412 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
413 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
415 x = copy_rtx (x);
416 XEXP (x, 0) = new0;
417 XEXP (x, 1) = new1;
420 return x;
422 /* If the increment has not happened, use the variable itself. */
423 if (QUEUED_INSN (x) == 0)
424 return QUEUED_VAR (x);
425 /* If the increment has happened and a pre-increment copy exists,
426 use that copy. */
427 if (QUEUED_COPY (x) != 0)
428 return QUEUED_COPY (x);
429 /* The increment has happened but we haven't set up a pre-increment copy.
430 Set one up now, and use it. */
431 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
432 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
433 QUEUED_INSN (x));
434 return QUEUED_COPY (x);
437 /* Return nonzero if X contains a QUEUED expression:
438 if it contains anything that will be altered by a queued increment.
439 We handle only combinations of MEM, PLUS, MINUS and MULT operators
440 since memory addresses generally contain only those. */
442 int
443 queued_subexp_p (x)
444 rtx x;
446 register enum rtx_code code = GET_CODE (x);
447 switch (code)
449 case QUEUED:
450 return 1;
451 case MEM:
452 return queued_subexp_p (XEXP (x, 0));
453 case MULT:
454 case PLUS:
455 case MINUS:
456 return (queued_subexp_p (XEXP (x, 0))
457 || queued_subexp_p (XEXP (x, 1)));
458 default:
459 return 0;
463 /* Perform all the pending incrementations. */
465 void
466 emit_queue ()
468 register rtx p;
469 while ((p = pending_chain))
471 rtx body = QUEUED_BODY (p);
473 if (GET_CODE (body) == SEQUENCE)
475 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
476 emit_insn (QUEUED_BODY (p));
478 else
479 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
480 pending_chain = QUEUED_NEXT (p);
484 /* Copy data from FROM to TO, where the machine modes are not the same.
485 Both modes may be integer, or both may be floating.
486 UNSIGNEDP should be nonzero if FROM is an unsigned type.
487 This causes zero-extension instead of sign-extension. */
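/* For example, converting a QImode register to SImode emits a
   zero-extension when UNSIGNEDP is nonzero and a sign-extension when it
   is zero; converting between SFmode and DFmode uses a floating-point
   extend or truncate instead.  */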
489 void
490 convert_move (to, from, unsignedp)
491 register rtx to, from;
492 int unsignedp;
494 enum machine_mode to_mode = GET_MODE (to);
495 enum machine_mode from_mode = GET_MODE (from);
496 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
497 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
498 enum insn_code code;
499 rtx libcall;
501 /* rtx code for making an equivalent value. */
502 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
504 to = protect_from_queue (to, 1);
505 from = protect_from_queue (from, 0);
507 if (to_real != from_real)
508 abort ();
510 /* If FROM is a SUBREG that indicates that we have already done at least
511 the required extension, strip it. We don't handle such SUBREGs as
512 TO here. */
514 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
515 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
516 >= GET_MODE_SIZE (to_mode))
517 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
518 from = gen_lowpart (to_mode, from), from_mode = to_mode;
520 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
521 abort ();
523 if (to_mode == from_mode
524 || (from_mode == VOIDmode && CONSTANT_P (from)))
526 emit_move_insn (to, from);
527 return;
530 if (to_real)
532 rtx value;
534 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
536 /* Try converting directly if the insn is supported. */
537 if ((code = can_extend_p (to_mode, from_mode, 0))
538 != CODE_FOR_nothing)
540 emit_unop_insn (code, to, from, UNKNOWN);
541 return;
545 #ifdef HAVE_trunchfqf2
546 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
548 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
549 return;
551 #endif
552 #ifdef HAVE_trunctqfqf2
553 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
555 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
556 return;
558 #endif
559 #ifdef HAVE_truncsfqf2
560 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
562 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
563 return;
565 #endif
566 #ifdef HAVE_truncdfqf2
567 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
569 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
570 return;
572 #endif
573 #ifdef HAVE_truncxfqf2
574 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
576 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
577 return;
579 #endif
580 #ifdef HAVE_trunctfqf2
581 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
583 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
584 return;
586 #endif
588 #ifdef HAVE_trunctqfhf2
589 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
591 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
592 return;
594 #endif
595 #ifdef HAVE_truncsfhf2
596 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
598 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
599 return;
601 #endif
602 #ifdef HAVE_truncdfhf2
603 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
605 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
606 return;
608 #endif
609 #ifdef HAVE_truncxfhf2
610 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
612 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
613 return;
615 #endif
616 #ifdef HAVE_trunctfhf2
617 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
619 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
620 return;
622 #endif
624 #ifdef HAVE_truncsftqf2
625 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
627 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
628 return;
630 #endif
631 #ifdef HAVE_truncdftqf2
632 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
634 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
635 return;
637 #endif
638 #ifdef HAVE_truncxftqf2
639 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
641 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
642 return;
644 #endif
645 #ifdef HAVE_trunctftqf2
646 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
648 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
649 return;
651 #endif
653 #ifdef HAVE_truncdfsf2
654 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
656 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
657 return;
659 #endif
660 #ifdef HAVE_truncxfsf2
661 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
663 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
664 return;
666 #endif
667 #ifdef HAVE_trunctfsf2
668 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
670 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
671 return;
673 #endif
674 #ifdef HAVE_truncxfdf2
675 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
677 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
678 return;
680 #endif
681 #ifdef HAVE_trunctfdf2
682 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
684 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
685 return;
687 #endif
689 libcall = (rtx) 0;
690 switch (from_mode)
692 case SFmode:
693 switch (to_mode)
695 case DFmode:
696 libcall = extendsfdf2_libfunc;
697 break;
699 case XFmode:
700 libcall = extendsfxf2_libfunc;
701 break;
703 case TFmode:
704 libcall = extendsftf2_libfunc;
705 break;
707 default:
708 break;
710 break;
712 case DFmode:
713 switch (to_mode)
715 case SFmode:
716 libcall = truncdfsf2_libfunc;
717 break;
719 case XFmode:
720 libcall = extenddfxf2_libfunc;
721 break;
723 case TFmode:
724 libcall = extenddftf2_libfunc;
725 break;
727 default:
728 break;
730 break;
732 case XFmode:
733 switch (to_mode)
735 case SFmode:
736 libcall = truncxfsf2_libfunc;
737 break;
739 case DFmode:
740 libcall = truncxfdf2_libfunc;
741 break;
743 default:
744 break;
746 break;
748 case TFmode:
749 switch (to_mode)
751 case SFmode:
752 libcall = trunctfsf2_libfunc;
753 break;
755 case DFmode:
756 libcall = trunctfdf2_libfunc;
757 break;
759 default:
760 break;
762 break;
764 default:
765 break;
768 if (libcall == (rtx) 0)
769 /* This conversion is not implemented yet. */
770 abort ();
772 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
773 1, from, from_mode);
774 emit_move_insn (to, value);
775 return;
778 /* Now both modes are integers. */
780 /* Handle expanding beyond a word. */
781 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
782 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
784 rtx insns;
785 rtx lowpart;
786 rtx fill_value;
787 rtx lowfrom;
788 int i;
789 enum machine_mode lowpart_mode;
790 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
792 /* Try converting directly if the insn is supported. */
793 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
794 != CODE_FOR_nothing)
796 /* If FROM is a SUBREG, put it into a register. Do this
797 so that we always generate the same set of insns for
798 better cse'ing; if an intermediate assignment occurred,
799 we won't be doing the operation directly on the SUBREG. */
800 if (optimize > 0 && GET_CODE (from) == SUBREG)
801 from = force_reg (from_mode, from);
802 emit_unop_insn (code, to, from, equiv_code);
803 return;
805 /* Next, try converting via full word. */
806 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
807 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
808 != CODE_FOR_nothing))
810 if (GET_CODE (to) == REG)
811 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
812 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
813 emit_unop_insn (code, to,
814 gen_lowpart (word_mode, to), equiv_code);
815 return;
818 /* No special multiword conversion insn; do it by hand. */
819 start_sequence ();
821 /* Since we will turn this into a no conflict block, we must ensure
822 that the source does not overlap the target. */
824 if (reg_overlap_mentioned_p (to, from))
825 from = force_reg (from_mode, from);
827 /* Get a copy of FROM widened to a word, if necessary. */
828 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
829 lowpart_mode = word_mode;
830 else
831 lowpart_mode = from_mode;
833 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
835 lowpart = gen_lowpart (lowpart_mode, to);
836 emit_move_insn (lowpart, lowfrom);
838 /* Compute the value to put in each remaining word. */
839 if (unsignedp)
840 fill_value = const0_rtx;
841 else
843 #ifdef HAVE_slt
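      /* When STORE_FLAG_VALUE is -1, an slt of LOWFROM against zero
	 yields a word of all ones exactly when LOWFROM is negative,
	 which is the sign fill wanted for the high-order words.  */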
844 if (HAVE_slt
845 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
846 && STORE_FLAG_VALUE == -1)
848 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
849 lowpart_mode, 0, 0);
850 fill_value = gen_reg_rtx (word_mode);
851 emit_insn (gen_slt (fill_value));
853 else
854 #endif
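      /* Otherwise compute the fill by arithmetically shifting LOWFROM
	 right by its width minus one, giving 0 or -1, and widening that
	 to a word.  */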
856 fill_value
857 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
858 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
859 NULL_RTX, 0);
860 fill_value = convert_to_mode (word_mode, fill_value, 1);
864 /* Fill the remaining words. */
865 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
867 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
868 rtx subword = operand_subword (to, index, 1, to_mode);
870 if (subword == 0)
871 abort ();
873 if (fill_value != subword)
874 emit_move_insn (subword, fill_value);
877 insns = get_insns ();
878 end_sequence ();
880 emit_no_conflict_block (insns, to, from, NULL_RTX,
881 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
882 return;
885 /* Truncating multi-word to a word or less. */
886 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
887 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
889 if (!((GET_CODE (from) == MEM
890 && ! MEM_VOLATILE_P (from)
891 && direct_load[(int) to_mode]
892 && ! mode_dependent_address_p (XEXP (from, 0)))
893 || GET_CODE (from) == REG
894 || GET_CODE (from) == SUBREG))
895 from = force_reg (from_mode, from);
896 convert_move (to, gen_lowpart (word_mode, from), 0);
897 return;
900 /* Handle pointer conversion */ /* SPEE 900220 */
901 if (to_mode == PQImode)
903 if (from_mode != QImode)
904 from = convert_to_mode (QImode, from, unsignedp);
906 #ifdef HAVE_truncqipqi2
907 if (HAVE_truncqipqi2)
909 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
910 return;
912 #endif /* HAVE_truncqipqi2 */
913 abort ();
916 if (from_mode == PQImode)
918 if (to_mode != QImode)
920 from = convert_to_mode (QImode, from, unsignedp);
921 from_mode = QImode;
923 else
925 #ifdef HAVE_extendpqiqi2
926 if (HAVE_extendpqiqi2)
928 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
929 return;
931 #endif /* HAVE_extendpqiqi2 */
932 abort ();
936 if (to_mode == PSImode)
938 if (from_mode != SImode)
939 from = convert_to_mode (SImode, from, unsignedp);
941 #ifdef HAVE_truncsipsi2
942 if (HAVE_truncsipsi2)
944 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
945 return;
947 #endif /* HAVE_truncsipsi2 */
948 abort ();
951 if (from_mode == PSImode)
953 if (to_mode != SImode)
955 from = convert_to_mode (SImode, from, unsignedp);
956 from_mode = SImode;
958 else
960 #ifdef HAVE_extendpsisi2
961 if (HAVE_extendpsisi2)
963 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
964 return;
966 #endif /* HAVE_extendpsisi2 */
967 abort ();
971 if (to_mode == PDImode)
973 if (from_mode != DImode)
974 from = convert_to_mode (DImode, from, unsignedp);
976 #ifdef HAVE_truncdipdi2
977 if (HAVE_truncdipdi2)
979 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
980 return;
982 #endif /* HAVE_truncdipdi2 */
983 abort ();
986 if (from_mode == PDImode)
988 if (to_mode != DImode)
990 from = convert_to_mode (DImode, from, unsignedp);
991 from_mode = DImode;
993 else
995 #ifdef HAVE_extendpdidi2
996 if (HAVE_extendpdidi2)
998 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
999 return;
1001 #endif /* HAVE_extendpdidi2 */
1002 abort ();
1006 /* Now follow all the conversions between integers
1007 no more than a word long. */
1009 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1010 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1011 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1012 GET_MODE_BITSIZE (from_mode)))
1014 if (!((GET_CODE (from) == MEM
1015 && ! MEM_VOLATILE_P (from)
1016 && direct_load[(int) to_mode]
1017 && ! mode_dependent_address_p (XEXP (from, 0)))
1018 || GET_CODE (from) == REG
1019 || GET_CODE (from) == SUBREG))
1020 from = force_reg (from_mode, from);
1021 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1022 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1023 from = copy_to_reg (from);
1024 emit_move_insn (to, gen_lowpart (to_mode, from));
1025 return;
1028 /* Handle extension. */
1029 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1031 /* Convert directly if that works. */
1032 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1033 != CODE_FOR_nothing)
1035 emit_unop_insn (code, to, from, equiv_code);
1036 return;
1038 else
1040 enum machine_mode intermediate;
1041 rtx tmp;
1042 tree shift_amount;
1044 /* Search for a mode to convert via. */
1045 for (intermediate = from_mode; intermediate != VOIDmode;
1046 intermediate = GET_MODE_WIDER_MODE (intermediate))
1047 if (((can_extend_p (to_mode, intermediate, unsignedp)
1048 != CODE_FOR_nothing)
1049 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1050 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1051 GET_MODE_BITSIZE (intermediate))))
1052 && (can_extend_p (intermediate, from_mode, unsignedp)
1053 != CODE_FOR_nothing))
1055 convert_move (to, convert_to_mode (intermediate, from,
1056 unsignedp), unsignedp);
1057 return;
1060 /* No suitable intermediate mode.
1061 Generate what we need with shifts. */
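	  /* Shifting left by the width difference and then right by the
	     same amount (arithmetically if signed, logically if unsigned)
	     reproduces the extension entirely within TO_MODE.  */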
1062 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1063 - GET_MODE_BITSIZE (from_mode), 0);
1064 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1065 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1066 to, unsignedp);
1067 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1068 to, unsignedp);
1069 if (tmp != to)
1070 emit_move_insn (to, tmp);
1071 return;
1075 /* Support special truncate insns for certain modes. */
1077 if (from_mode == DImode && to_mode == SImode)
1079 #ifdef HAVE_truncdisi2
1080 if (HAVE_truncdisi2)
1082 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1083 return;
1085 #endif
1086 convert_move (to, force_reg (from_mode, from), unsignedp);
1087 return;
1090 if (from_mode == DImode && to_mode == HImode)
1092 #ifdef HAVE_truncdihi2
1093 if (HAVE_truncdihi2)
1095 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1096 return;
1098 #endif
1099 convert_move (to, force_reg (from_mode, from), unsignedp);
1100 return;
1103 if (from_mode == DImode && to_mode == QImode)
1105 #ifdef HAVE_truncdiqi2
1106 if (HAVE_truncdiqi2)
1108 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1109 return;
1111 #endif
1112 convert_move (to, force_reg (from_mode, from), unsignedp);
1113 return;
1116 if (from_mode == SImode && to_mode == HImode)
1118 #ifdef HAVE_truncsihi2
1119 if (HAVE_truncsihi2)
1121 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1122 return;
1124 #endif
1125 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 return;
1129 if (from_mode == SImode && to_mode == QImode)
1131 #ifdef HAVE_truncsiqi2
1132 if (HAVE_truncsiqi2)
1134 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1135 return;
1137 #endif
1138 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 return;
1142 if (from_mode == HImode && to_mode == QImode)
1144 #ifdef HAVE_trunchiqi2
1145 if (HAVE_trunchiqi2)
1147 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1148 return;
1150 #endif
1151 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 return;
1155 if (from_mode == TImode && to_mode == DImode)
1157 #ifdef HAVE_trunctidi2
1158 if (HAVE_trunctidi2)
1160 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1161 return;
1163 #endif
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 return;
1168 if (from_mode == TImode && to_mode == SImode)
1170 #ifdef HAVE_trunctisi2
1171 if (HAVE_trunctisi2)
1173 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1174 return;
1176 #endif
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 return;
1181 if (from_mode == TImode && to_mode == HImode)
1183 #ifdef HAVE_trunctihi2
1184 if (HAVE_trunctihi2)
1186 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1187 return;
1189 #endif
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 return;
1194 if (from_mode == TImode && to_mode == QImode)
1196 #ifdef HAVE_trunctiqi2
1197 if (HAVE_trunctiqi2)
1199 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1200 return;
1202 #endif
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 return;
1207 /* Handle truncation of volatile memrefs, and so on;
1208 the things that couldn't be truncated directly,
1209 and for which there was no special instruction. */
1210 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1212 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1213 emit_move_insn (to, temp);
1214 return;
1217 /* Mode combination is not recognized. */
1218 abort ();
1221 /* Return an rtx for a value that would result
1222 from converting X to mode MODE.
1223 Both X and MODE may be floating, or both integer.
1224 UNSIGNEDP is nonzero if X is an unsigned value.
1225 This can be done by referring to a part of X in place
1226 or by copying to a new temporary with conversion.
1228 This function *must not* call protect_from_queue
1229 except when putting X into an insn (in which case convert_move does it). */
1231 rtx
1232 convert_to_mode (mode, x, unsignedp)
1233 enum machine_mode mode;
1234 rtx x;
1235 int unsignedp;
1237 return convert_modes (mode, VOIDmode, x, unsignedp);
1240 /* Return an rtx for a value that would result
1241 from converting X from mode OLDMODE to mode MODE.
1242 Both modes may be floating, or both integer.
1243 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1248 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1250 This function *must not* call protect_from_queue
1251 except when putting X into an insn (in which case convert_move does it). */
1253 rtx
1254 convert_modes (mode, oldmode, x, unsignedp)
1255 enum machine_mode mode, oldmode;
1256 rtx x;
1257 int unsignedp;
1259 register rtx temp;
1261 /* If FROM is a SUBREG that indicates that we have already done at least
1262 the required extension, strip it. */
1264 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1265 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1266 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1267 x = gen_lowpart (mode, x);
1269 if (GET_MODE (x) != VOIDmode)
1270 oldmode = GET_MODE (x);
1272 if (mode == oldmode)
1273 return x;
1275 /* There is one case that we must handle specially: If we are converting
1276 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1277 we are to interpret the constant as unsigned, gen_lowpart will do
1278      the wrong thing if the constant appears negative. What we want to do is
1279 make the high-order word of the constant zero, not all ones. */
1281 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1282 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1283 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1285 HOST_WIDE_INT val = INTVAL (x);
1287 if (oldmode != VOIDmode
1288 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1290 int width = GET_MODE_BITSIZE (oldmode);
1292 /* We need to zero extend VAL. */
1293 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1296 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1299 /* We can do this with a gen_lowpart if both desired and current modes
1300 are integer, and this is either a constant integer, a register, or a
1301 non-volatile MEM. Except for the constant case where MODE is no
1302 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1304 if ((GET_CODE (x) == CONST_INT
1305 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1306 || (GET_MODE_CLASS (mode) == MODE_INT
1307 && GET_MODE_CLASS (oldmode) == MODE_INT
1308 && (GET_CODE (x) == CONST_DOUBLE
1309 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1310 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1311 && direct_load[(int) mode])
1312 || (GET_CODE (x) == REG
1313 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1314 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1316 /* ?? If we don't know OLDMODE, we have to assume here that
1317 X does not need sign- or zero-extension. This may not be
1318 the case, but it's the best we can do. */
1319 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1320 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1322 HOST_WIDE_INT val = INTVAL (x);
1323 int width = GET_MODE_BITSIZE (oldmode);
1325 /* We must sign or zero-extend in this case. Start by
1326 zero-extending, then sign extend if we need to. */
1327 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1328 if (! unsignedp
1329 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1330 val |= (HOST_WIDE_INT) (-1) << width;
1332 return GEN_INT (val);
1335 return gen_lowpart (mode, x);
1338 temp = gen_reg_rtx (mode);
1339 convert_move (temp, x, unsignedp);
1340 return temp;
1344 /* This macro is used to determine what the largest unit size that
1345 move_by_pieces can use is. */
1347 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1348 move efficiently, as opposed to MOVE_MAX which is the maximum
1349    number of bytes we can move with a single instruction. */
1351 #ifndef MOVE_MAX_PIECES
1352 #define MOVE_MAX_PIECES MOVE_MAX
1353 #endif
1355 /* Generate several move instructions to copy LEN bytes
1356 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1357 The caller must pass FROM and TO
1358 through protect_from_queue before calling.
1359    ALIGN (in bytes) is the maximum alignment we can assume. */
1361 void
1362 move_by_pieces (to, from, len, align)
1363 rtx to, from;
1364 int len;
1365 unsigned int align;
1367 struct move_by_pieces data;
1368 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1369 int max_size = MOVE_MAX_PIECES + 1;
1370 enum machine_mode mode = VOIDmode, tmode;
1371 enum insn_code icode;
1373 data.offset = 0;
1374 data.to_addr = to_addr;
1375 data.from_addr = from_addr;
1376 data.to = to;
1377 data.from = from;
1378 data.autinc_to
1379 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1380 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1381 data.autinc_from
1382 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1383 || GET_CODE (from_addr) == POST_INC
1384 || GET_CODE (from_addr) == POST_DEC);
1386 data.explicit_inc_from = 0;
1387 data.explicit_inc_to = 0;
1388 data.reverse
1389 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1390 if (data.reverse) data.offset = len;
1391 data.len = len;
1393 data.to_struct = MEM_IN_STRUCT_P (to);
1394 data.from_struct = MEM_IN_STRUCT_P (from);
1395 data.to_readonly = RTX_UNCHANGING_P (to);
1396 data.from_readonly = RTX_UNCHANGING_P (from);
1398 /* If copying requires more than two move insns,
1399 copy addresses to registers (to make displacements shorter)
1400 and use post-increment if available. */
1401 if (!(data.autinc_from && data.autinc_to)
1402 && move_by_pieces_ninsns (len, align) > 2)
1404 /* Find the mode of the largest move... */
1405 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1406 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1407 if (GET_MODE_SIZE (tmode) < max_size)
1408 mode = tmode;
1410 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1412 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1413 data.autinc_from = 1;
1414 data.explicit_inc_from = -1;
1416 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1418 data.from_addr = copy_addr_to_reg (from_addr);
1419 data.autinc_from = 1;
1420 data.explicit_inc_from = 1;
1422 if (!data.autinc_from && CONSTANT_P (from_addr))
1423 data.from_addr = copy_addr_to_reg (from_addr);
1424 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1426 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1427 data.autinc_to = 1;
1428 data.explicit_inc_to = -1;
1430 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1432 data.to_addr = copy_addr_to_reg (to_addr);
1433 data.autinc_to = 1;
1434 data.explicit_inc_to = 1;
1436 if (!data.autinc_to && CONSTANT_P (to_addr))
1437 data.to_addr = copy_addr_to_reg (to_addr);
1440 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1441 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1442 align = MOVE_MAX;
1444 /* First move what we can in the largest integer mode, then go to
1445 successively smaller modes. */
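  /* For example, with word-sized moves available and sufficient
     alignment, an 11-byte copy on a 32-bit target becomes two SImode
     moves, one HImode move and one QImode move.  */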
1447 while (max_size > 1)
1449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1451 if (GET_MODE_SIZE (tmode) < max_size)
1452 mode = tmode;
1454 if (mode == VOIDmode)
1455 break;
1457 icode = mov_optab->handlers[(int) mode].insn_code;
1458 if (icode != CODE_FOR_nothing
1459 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1460 (unsigned int) GET_MODE_SIZE (mode)))
1461 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1463 max_size = GET_MODE_SIZE (mode);
1466 /* The code above should have handled everything. */
1467 if (data.len > 0)
1468 abort ();
1471 /* Return number of insns required to move L bytes by pieces.
1472    ALIGN (in bytes) is the maximum alignment we can assume. */
1474 static int
1475 move_by_pieces_ninsns (l, align)
1476 unsigned int l;
1477 unsigned int align;
1479 register int n_insns = 0;
1480 int max_size = MOVE_MAX + 1;
1482 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1483 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1484 align = MOVE_MAX;
1486 while (max_size > 1)
1488 enum machine_mode mode = VOIDmode, tmode;
1489 enum insn_code icode;
1491 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1492 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1493 if (GET_MODE_SIZE (tmode) < max_size)
1494 mode = tmode;
1496 if (mode == VOIDmode)
1497 break;
1499 icode = mov_optab->handlers[(int) mode].insn_code;
1500 if (icode != CODE_FOR_nothing
1501 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1504 max_size = GET_MODE_SIZE (mode);
1507 return n_insns;
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1514 static void
1515 move_by_pieces_1 (genfun, mode, data)
1516 rtx (*genfun) PARAMS ((rtx, ...));
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1523 while (data->len >= size)
1525 if (data->reverse) data->offset -= size;
1527 to1 = (data->autinc_to
1528 ? gen_rtx_MEM (mode, data->to_addr)
1529 : copy_rtx (change_address (data->to, mode,
1530 plus_constant (data->to_addr,
1531 data->offset))));
1532 MEM_IN_STRUCT_P (to1) = data->to_struct;
1533 RTX_UNCHANGING_P (to1) = data->to_readonly;
1535 from1
1536 = (data->autinc_from
1537 ? gen_rtx_MEM (mode, data->from_addr)
1538 : copy_rtx (change_address (data->from, mode,
1539 plus_constant (data->from_addr,
1540 data->offset))));
1541 MEM_IN_STRUCT_P (from1) = data->from_struct;
1542 RTX_UNCHANGING_P (from1) = data->from_readonly;
1544 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1546 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1547 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1549 emit_insn ((*genfun) (to1, from1));
1550 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1551 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1552 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1553 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1555 if (! data->reverse) data->offset += size;
1557 data->len -= size;
1561 /* Emit code to move a block Y to a block X.
1562 This may be done with string-move instructions,
1563 with multiple scalar move instructions, or with a library call.
1565 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1566 with mode BLKmode.
1567 SIZE is an rtx that says how long they are.
1568 ALIGN is the maximum alignment we can assume they have,
1569 measured in bytes.
1571 Return the address of the new block, if memcpy is called and returns it,
1572 0 otherwise. */
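/* For a small constant SIZE this expands to inline moves via
   move_by_pieces; otherwise the target's movstrM patterns are tried and
   finally a call to memcpy (or bcopy) is emitted.  */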
1574 rtx
1575 emit_block_move (x, y, size, align)
1576 rtx x, y;
1577 rtx size;
1578 unsigned int align;
1580 rtx retval = 0;
1581 #ifdef TARGET_MEM_FUNCTIONS
1582 static tree fn;
1583 tree call_expr, arg_list;
1584 #endif
1586 if (GET_MODE (x) != BLKmode)
1587 abort ();
1589 if (GET_MODE (y) != BLKmode)
1590 abort ();
1592 x = protect_from_queue (x, 1);
1593 y = protect_from_queue (y, 0);
1594 size = protect_from_queue (size, 0);
1596 if (GET_CODE (x) != MEM)
1597 abort ();
1598 if (GET_CODE (y) != MEM)
1599 abort ();
1600 if (size == 0)
1601 abort ();
1603 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1604 move_by_pieces (x, y, INTVAL (size), align);
1605 else
1607 /* Try the most limited insn first, because there's no point
1608 including more than one in the machine description unless
1609 the more limited one has some advantage. */
1611 rtx opalign = GEN_INT (align);
1612 enum machine_mode mode;
1614 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1615 mode = GET_MODE_WIDER_MODE (mode))
1617 enum insn_code code = movstr_optab[(int) mode];
1618 insn_operand_predicate_fn pred;
1620 if (code != CODE_FOR_nothing
1621          /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1622 here because if SIZE is less than the mode mask, as it is
1623 returned by the macro, it will definitely be less than the
1624 actual mode mask. */
1625 && ((GET_CODE (size) == CONST_INT
1626 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1627 <= (GET_MODE_MASK (mode) >> 1)))
1628 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1629 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1630 || (*pred) (x, BLKmode))
1631 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1632 || (*pred) (y, BLKmode))
1633 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1634 || (*pred) (opalign, VOIDmode)))
1636 rtx op2;
1637 rtx last = get_last_insn ();
1638 rtx pat;
1640 op2 = convert_to_mode (mode, size, 1);
1641 pred = insn_data[(int) code].operand[2].predicate;
1642 if (pred != 0 && ! (*pred) (op2, mode))
1643 op2 = copy_to_mode_reg (mode, op2);
1645 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1646 if (pat)
1648 emit_insn (pat);
1649 return 0;
1651 else
1652 delete_insns_since (last);
1656 /* X, Y, or SIZE may have been passed through protect_from_queue.
1658 It is unsafe to save the value generated by protect_from_queue
1659 and reuse it later. Consider what happens if emit_queue is
1660 called before the return value from protect_from_queue is used.
1662 Expansion of the CALL_EXPR below will call emit_queue before
1663 we are finished emitting RTL for argument setup. So if we are
1664 not careful we could get the wrong value for an argument.
1666 To avoid this problem we go ahead and emit code to copy X, Y &
1667 SIZE into new pseudos. We can then place those new pseudos
1668 into an RTL_EXPR and use them later, even after a call to
1669 emit_queue.
1671 Note this is not strictly needed for library calls since they
1672 do not call emit_queue before loading their arguments. However,
1673 we may need to have library calls call emit_queue in the future
1674 since failing to do so could cause problems for targets which
1675 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1676 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1677 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1679 #ifdef TARGET_MEM_FUNCTIONS
1680 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1681 #else
1682 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1683 TREE_UNSIGNED (integer_type_node));
1684 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1685 #endif
1687 #ifdef TARGET_MEM_FUNCTIONS
1688 /* It is incorrect to use the libcall calling conventions to call
1689 memcpy in this context.
1691 This could be a user call to memcpy and the user may wish to
1692 examine the return value from memcpy.
1694 For targets where libcalls and normal calls have different conventions
1695 for returning pointers, we could end up generating incorrect code.
1697 So instead of using a libcall sequence we build up a suitable
1698 CALL_EXPR and expand the call in the normal fashion. */
1699 if (fn == NULL_TREE)
1701 tree fntype;
1703 /* This was copied from except.c, I don't know if all this is
1704 necessary in this context or not. */
1705 fn = get_identifier ("memcpy");
1706 push_obstacks_nochange ();
1707 end_temporary_allocation ();
1708 fntype = build_pointer_type (void_type_node);
1709 fntype = build_function_type (fntype, NULL_TREE);
1710 fn = build_decl (FUNCTION_DECL, fn, fntype);
1711 ggc_add_tree_root (&fn, 1);
1712 DECL_EXTERNAL (fn) = 1;
1713 TREE_PUBLIC (fn) = 1;
1714 DECL_ARTIFICIAL (fn) = 1;
1715 make_decl_rtl (fn, NULL_PTR, 1);
1716 assemble_external (fn);
1717 pop_obstacks ();
1720 /* We need to make an argument list for the function call.
1722 memcpy has three arguments, the first two are void * addresses and
1723 the last is a size_t byte count for the copy. */
1724 arg_list
1725 = build_tree_list (NULL_TREE,
1726 make_tree (build_pointer_type (void_type_node), x));
1727 TREE_CHAIN (arg_list)
1728 = build_tree_list (NULL_TREE,
1729 make_tree (build_pointer_type (void_type_node), y));
1730 TREE_CHAIN (TREE_CHAIN (arg_list))
1731 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1732 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1734 /* Now we have to build up the CALL_EXPR itself. */
1735 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1736 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1737 call_expr, arg_list, NULL_TREE);
1738 TREE_SIDE_EFFECTS (call_expr) = 1;
1740 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1741 #else
1742 emit_library_call (bcopy_libfunc, 0,
1743 VOIDmode, 3, y, Pmode, x, Pmode,
1744 convert_to_mode (TYPE_MODE (integer_type_node), size,
1745 TREE_UNSIGNED (integer_type_node)),
1746 TYPE_MODE (integer_type_node));
1747 #endif
1750 return retval;
1753 /* Copy all or part of a value X into registers starting at REGNO.
1754 The number of registers to be filled is NREGS. */
1756 void
1757 move_block_to_reg (regno, x, nregs, mode)
1758 int regno;
1759 rtx x;
1760 int nregs;
1761 enum machine_mode mode;
1763 int i;
1764 #ifdef HAVE_load_multiple
1765 rtx pat;
1766 rtx last;
1767 #endif
1769 if (nregs == 0)
1770 return;
1772 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1773 x = validize_mem (force_const_mem (mode, x));
1775 /* See if the machine can do this with a load multiple insn. */
1776 #ifdef HAVE_load_multiple
1777 if (HAVE_load_multiple)
1779 last = get_last_insn ();
1780 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1781 GEN_INT (nregs));
1782 if (pat)
1784 emit_insn (pat);
1785 return;
1787 else
1788 delete_insns_since (last);
1790 #endif
1792 for (i = 0; i < nregs; i++)
1793 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1794 operand_subword_force (x, i, mode));
1797 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1798 The number of registers to be filled is NREGS. SIZE indicates the number
1799 of bytes in the object X. */
1802 void
1803 move_block_from_reg (regno, x, nregs, size)
1804 int regno;
1805 rtx x;
1806 int nregs;
1807 int size;
1809 int i;
1810 #ifdef HAVE_store_multiple
1811 rtx pat;
1812 rtx last;
1813 #endif
1814 enum machine_mode mode;
1816 /* If SIZE is that of a mode no bigger than a word, just use that
1817 mode's store operation. */
1818 if (size <= UNITS_PER_WORD
1819 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1821 emit_move_insn (change_address (x, mode, NULL),
1822 gen_rtx_REG (mode, regno));
1823 return;
1826 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1827 to the left before storing to memory. Note that the previous test
1828 doesn't handle all cases (e.g. SIZE == 3). */
1829 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1831 rtx tem = operand_subword (x, 0, 1, BLKmode);
1832 rtx shift;
1834 if (tem == 0)
1835 abort ();
1837 shift = expand_shift (LSHIFT_EXPR, word_mode,
1838 gen_rtx_REG (word_mode, regno),
1839 build_int_2 ((UNITS_PER_WORD - size)
1840 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1841 emit_move_insn (tem, shift);
1842 return;
1845 /* See if the machine can do this with a store multiple insn. */
1846 #ifdef HAVE_store_multiple
1847 if (HAVE_store_multiple)
1849 last = get_last_insn ();
1850 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1851 GEN_INT (nregs));
1852 if (pat)
1854 emit_insn (pat);
1855 return;
1857 else
1858 delete_insns_since (last);
1860 #endif
1862 for (i = 0; i < nregs; i++)
1864 rtx tem = operand_subword (x, i, 1, BLKmode);
1866 if (tem == 0)
1867 abort ();
1869 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1873 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1874 registers represented by a PARALLEL. SSIZE represents the total size of
1875 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1876 SRC in bits. */
1877 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1878 the balance will be in what would be the low-order memory addresses, i.e.
1879 left justified for big endian, right justified for little endian. This
1880 happens to be true for the targets currently using this support. If this
1881 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1882 would be needed. */
1884 void
1885 emit_group_load (dst, orig_src, ssize, align)
1886 rtx dst, orig_src;
1887 unsigned int align;
1888 int ssize;
1890 rtx *tmps, src;
1891 int start, i;
1893 if (GET_CODE (dst) != PARALLEL)
1894 abort ();
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (dst, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1903 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1905 /* If we won't be loading directly from memory, protect the real source
1906 from strange tricks we might play. */
1907 src = orig_src;
1908 if (GET_CODE (src) != MEM)
1910       if (GET_MODE (src) == VOIDmode)
1911 src = gen_reg_rtx (GET_MODE (dst));
1912 else
1913 src = gen_reg_rtx (GET_MODE (orig_src));
1914 emit_move_insn (src, orig_src);
1917 /* Process the pieces. */
1918 for (i = start; i < XVECLEN (dst, 0); i++)
1920 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1921 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1922 int bytelen = GET_MODE_SIZE (mode);
1923 int shift = 0;
1925 /* Handle trailing fragments that run over the size of the struct. */
1926 if (ssize >= 0 && bytepos + bytelen > ssize)
1928 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1929 bytelen = ssize - bytepos;
1930 if (bytelen <= 0)
1931 abort ();
1934 /* Optimize the access just a bit. */
1935 if (GET_CODE (src) == MEM
1936 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1937 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1938 && bytelen == GET_MODE_SIZE (mode))
1940 tmps[i] = gen_reg_rtx (mode);
1941 emit_move_insn (tmps[i],
1942 change_address (src, mode,
1943 plus_constant (XEXP (src, 0),
1944 bytepos)));
1946 else if (GET_CODE (src) == CONCAT)
1948 if (bytepos == 0
1949 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1950 tmps[i] = XEXP (src, 0);
1951 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1952 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1953 tmps[i] = XEXP (src, 1);
1954 else
1955 abort ();
1957 else
1959 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1960 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1961 mode, mode, align, ssize);
1964 if (BYTES_BIG_ENDIAN && shift)
1966 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1967 tmps[i], 0, OPTAB_WIDEN);
1970 emit_queue();
1972 /* Copy the extracted pieces into the proper (probable) hard regs. */
1973 for (i = start; i < XVECLEN (dst, 0); i++)
1974 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1977 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1978 registers represented by a PARALLEL. SSIZE represents the total size of
1979 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1981 void
1982 emit_group_store (orig_dst, src, ssize, align)
1983 rtx orig_dst, src;
1984 int ssize;
1985 unsigned int align;
1987 rtx *tmps, dst;
1988 int start, i;
1990 if (GET_CODE (src) != PARALLEL)
1991 abort ();
1993 /* Check for a NULL entry, used to indicate that the parameter goes
1994 both on the stack and in registers. */
1995 if (XEXP (XVECEXP (src, 0, 0), 0))
1996 start = 0;
1997 else
1998 start = 1;
2000 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2002 /* Copy the (probable) hard regs into pseudos. */
2003 for (i = start; i < XVECLEN (src, 0); i++)
2005 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2006 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2007 emit_move_insn (tmps[i], reg);
2009 emit_queue();
2011 /* If we won't be storing directly into memory, protect the real destination
2012 from strange tricks we might play. */
2013 dst = orig_dst;
2014 if (GET_CODE (dst) == PARALLEL)
2016 rtx temp;
2018 /* We can get a PARALLEL dst if there is a conditional expression in
2019 a return statement. In that case, the dst and src are the same,
2020 so no action is necessary. */
2021 if (rtx_equal_p (dst, src))
2022 return;
2024 /* It is unclear if we can ever reach here, but we may as well handle
2025 it. Allocate a temporary, and split this into a store/load to/from
2026 the temporary. */
2028 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2029 emit_group_store (temp, src, ssize, align);
2030 emit_group_load (dst, temp, ssize, align);
2031 return;
2033 else if (GET_CODE (dst) != MEM)
2035 dst = gen_reg_rtx (GET_MODE (orig_dst));
2036 /* Make life a bit easier for combine. */
2037 emit_move_insn (dst, const0_rtx);
2039 else if (! MEM_IN_STRUCT_P (dst))
2041 /* store_bit_field requires that memory operations have
2042 mem_in_struct_p set; we might not. */
2044 dst = copy_rtx (orig_dst);
2045 MEM_SET_IN_STRUCT_P (dst, 1);
2048 /* Process the pieces. */
2049 for (i = start; i < XVECLEN (src, 0); i++)
2051 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2052 enum machine_mode mode = GET_MODE (tmps[i]);
2053 int bytelen = GET_MODE_SIZE (mode);
2055 /* Handle trailing fragments that run over the size of the struct. */
2056 if (ssize >= 0 && bytepos + bytelen > ssize)
2058 if (BYTES_BIG_ENDIAN)
2060 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2061 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2062 tmps[i], 0, OPTAB_WIDEN);
2064 bytelen = ssize - bytepos;
2067 /* Optimize the access just a bit. */
2068 if (GET_CODE (dst) == MEM
2069 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2070 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2071 && bytelen == GET_MODE_SIZE (mode))
2072 emit_move_insn (change_address (dst, mode,
2073 plus_constant (XEXP (dst, 0),
2074 bytepos)),
2075 tmps[i]);
2076 else
2077 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2078 mode, tmps[i], align, ssize);
2081 emit_queue ();
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (GET_CODE (dst) == REG)
2085 emit_move_insn (orig_dst, dst);
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2092 The primary purpose of this routine is to handle functions
2093 that return BLKmode structures in registers. Some machines
2094 (the PA for example) want to return all small structures
2095 in registers regardless of the structure's alignment. */
2098 copy_blkmode_from_reg (tgtblk, srcreg, type)
2099 rtx tgtblk;
2100 rtx srcreg;
2101 tree type;
2103 int bytes = int_size_in_bytes (type);
2104 rtx src = NULL, dst = NULL;
2105 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2106 int bitpos, xbitpos, big_endian_correction = 0;
2108 if (tgtblk == 0)
2110 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2111 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2112 preserve_temp_slots (tgtblk);
2115 /* This code assumes srcreg is at least a full word. If it isn't,
2116 copy it into a new pseudo which is a full word. */
2117 if (GET_MODE (srcreg) != BLKmode
2118 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2119 srcreg = convert_to_mode (word_mode, srcreg,
2120 TREE_UNSIGNED (type));
2122 /* Structures whose size is not a multiple of a word are aligned
2123 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2124 machine, this means we must skip the empty high order bytes when
2125 calculating the bit offset. */
2126 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2127 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2128 * BITS_PER_UNIT));
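/* Worked example (assuming 32-bit words): for a 6-byte structure,
   bytes % UNITS_PER_WORD == 2, so the correction is
   32 - 2 * BITS_PER_UNIT == 16; the copy below then starts extracting
   at bit 16 of the first source word, skipping the 16 empty high-order
   bits that pad the right-justified value on a big-endian machine.  */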
2130 Copy the structure BITSIZE bits at a time.
2132 We could probably emit more efficient code for machines
2133 which do not use strict alignment, but it doesn't seem
2134 worth the effort at the current time. */
2135 for (bitpos = 0, xbitpos = big_endian_correction;
2136 bitpos < bytes * BITS_PER_UNIT;
2137 bitpos += bitsize, xbitpos += bitsize)
2140 /* We need a new source operand each time xbitpos is on a
2141 word boundary and when xbitpos == big_endian_correction
2142 (the first time through). */
2143 if (xbitpos % BITS_PER_WORD == 0
2144 || xbitpos == big_endian_correction)
2145 src = operand_subword_force (srcreg,
2146 xbitpos / BITS_PER_WORD,
2147 BLKmode);
2149 /* We need a new destination operand each time bitpos is on
2150 a word boundary. */
2151 if (bitpos % BITS_PER_WORD == 0)
2152 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2154 /* Use xbitpos for the source extraction (right justified) and
2155 bitpos for the destination store (left justified). */
2156 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2157 extract_bit_field (src, bitsize,
2158 xbitpos % BITS_PER_WORD, 1,
2159 NULL_RTX, word_mode,
2160 word_mode,
2161 bitsize / BITS_PER_UNIT,
2162 BITS_PER_WORD),
2163 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2165 return tgtblk;
2169 /* Add a USE expression for REG to the (possibly empty) list pointed
2170 to by CALL_FUSAGE. REG must denote a hard register. */
2172 void
2173 use_reg (call_fusage, reg)
2174 rtx *call_fusage, reg;
2176 if (GET_CODE (reg) != REG
2177 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2178 abort ();
2180 *call_fusage
2181 = gen_rtx_EXPR_LIST (VOIDmode,
2182 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2185 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2186 starting at REGNO. All of these registers must be hard registers. */
2188 void
2189 use_regs (call_fusage, regno, nregs)
2190 rtx *call_fusage;
2191 int regno;
2192 int nregs;
2194 int i;
2196 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2197 abort ();
2199 for (i = 0; i < nregs; i++)
2200 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2203 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2204 PARALLEL REGS. This is for calls that pass values in multiple
2205 non-contiguous locations. The Irix 6 ABI has examples of this. */
2207 void
2208 use_group_regs (call_fusage, regs)
2209 rtx *call_fusage;
2210 rtx regs;
2212 int i;
2214 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2218 /* A NULL entry means the parameter goes both on the stack and in
2219 registers. This can also be a MEM for targets that pass values
2220 partially on the stack and partially in registers. */
2221 if (reg != 0 && GET_CODE (reg) == REG)
2222 use_reg (call_fusage, reg);
2226 /* Generate several move instructions to clear LEN bytes of block TO.
2227 (A MEM rtx with BLKmode). The caller must pass TO through
2228 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2229 we can assume. */
2231 static void
2232 clear_by_pieces (to, len, align)
2233 rtx to;
2234 int len;
2235 unsigned int align;
2237 struct clear_by_pieces data;
2238 rtx to_addr = XEXP (to, 0);
2239 int max_size = MOVE_MAX_PIECES + 1;
2240 enum machine_mode mode = VOIDmode, tmode;
2241 enum insn_code icode;
2243 data.offset = 0;
2244 data.to_addr = to_addr;
2245 data.to = to;
2246 data.autinc_to
2247 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2248 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2250 data.explicit_inc_to = 0;
2251 data.reverse
2252 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2253 if (data.reverse) data.offset = len;
2254 data.len = len;
2256 data.to_struct = MEM_IN_STRUCT_P (to);
2258 /* If copying requires more than two move insns,
2259 copy addresses to registers (to make displacements shorter)
2260 and use post-increment if available. */
2261 if (!data.autinc_to
2262 && move_by_pieces_ninsns (len, align) > 2)
2264 /* Determine the main mode we'll be using */
2265 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2266 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2267 if (GET_MODE_SIZE (tmode) < max_size)
2268 mode = tmode;
2270 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2272 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2273 data.autinc_to = 1;
2274 data.explicit_inc_to = -1;
2276 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = 1;
2282 if (!data.autinc_to && CONSTANT_P (to_addr))
2283 data.to_addr = copy_addr_to_reg (to_addr);
2286 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2287 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2288 align = MOVE_MAX;
2290 /* First move what we can in the largest integer mode, then go to
2291 successively smaller modes. */
2293 while (max_size > 1)
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2300 if (mode == VOIDmode)
2301 break;
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2306 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2308 max_size = GET_MODE_SIZE (mode);
2311 /* The code above should have handled everything. */
2312 if (data.len != 0)
2313 abort ();
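/* Worked example (assuming MOVE_MAX_PIECES == 4 and a word-aligned TO):
   clearing 7 bytes walks the loop above with SImode, then HImode, then
   QImode, emitting one zero store of each size (4 + 2 + 1 bytes), so
   data.len reaches 0 and the abort above is never hit.  */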
2316 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2317 with move instructions for mode MODE. GENFUN is the gen_... function
2318 to make a move insn for that mode. DATA has all the other info. */
2320 static void
2321 clear_by_pieces_1 (genfun, mode, data)
2322 rtx (*genfun) PARAMS ((rtx, ...));
2323 enum machine_mode mode;
2324 struct clear_by_pieces *data;
2326 register int size = GET_MODE_SIZE (mode);
2327 register rtx to1;
2329 while (data->len >= size)
2331 if (data->reverse) data->offset -= size;
2333 to1 = (data->autinc_to
2334 ? gen_rtx_MEM (mode, data->to_addr)
2335 : copy_rtx (change_address (data->to, mode,
2336 plus_constant (data->to_addr,
2337 data->offset))));
2338 MEM_IN_STRUCT_P (to1) = data->to_struct;
2340 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2341 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2343 emit_insn ((*genfun) (to1, const0_rtx));
2344 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2345 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2347 if (! data->reverse) data->offset += size;
2349 data->len -= size;
2353 /* Write zeros through the storage of OBJECT.
2354 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2355 the maximum alignment we can assume it has, measured in bytes.
2357 If we call a function that returns the length of the block, return it. */
2360 clear_storage (object, size, align)
2361 rtx object;
2362 rtx size;
2363 unsigned int align;
2365 #ifdef TARGET_MEM_FUNCTIONS
2366 static tree fn;
2367 tree call_expr, arg_list;
2368 #endif
2369 rtx retval = 0;
2371 if (GET_MODE (object) == BLKmode)
2373 object = protect_from_queue (object, 1);
2374 size = protect_from_queue (size, 0);
2376 if (GET_CODE (size) == CONST_INT
2377 && MOVE_BY_PIECES_P (INTVAL (size), align))
2378 clear_by_pieces (object, INTVAL (size), align);
2380 else
2382 /* Try the most limited insn first, because there's no point
2383 including more than one in the machine description unless
2384 the more limited one has some advantage. */
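/* For instance, on a target providing a "clrstrsi" expander the loop
   below hands it the BLKmode destination, the byte count converted to
   SImode and the alignment; if the expander FAILs (produces no
   pattern), the insns emitted while trying it are deleted and we fall
   through to the library-call path further down.  (Pattern name shown
   for illustration only; availability depends on the target.)  */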
2386 rtx opalign = GEN_INT (align);
2387 enum machine_mode mode;
2389 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2390 mode = GET_MODE_WIDER_MODE (mode))
2392 enum insn_code code = clrstr_optab[(int) mode];
2393 insn_operand_predicate_fn pred;
2395 if (code != CODE_FOR_nothing
2396 /* We don't need MODE to be narrower than
2397 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2398 the mode mask, as it is returned by the macro, it will
2399 definitely be less than the actual mode mask. */
2400 && ((GET_CODE (size) == CONST_INT
2401 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2402 <= (GET_MODE_MASK (mode) >> 1)))
2403 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2404 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2405 || (*pred) (object, BLKmode))
2406 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2407 || (*pred) (opalign, VOIDmode)))
2409 rtx op1;
2410 rtx last = get_last_insn ();
2411 rtx pat;
2413 op1 = convert_to_mode (mode, size, 1);
2414 pred = insn_data[(int) code].operand[1].predicate;
2415 if (pred != 0 && ! (*pred) (op1, mode))
2416 op1 = copy_to_mode_reg (mode, op1);
2418 pat = GEN_FCN ((int) code) (object, op1, opalign);
2419 if (pat)
2421 emit_insn (pat);
2422 return 0;
2424 else
2425 delete_insns_since (last);
2429 /* OBJECT or SIZE may have been passed through protect_from_queue.
2431 It is unsafe to save the value generated by protect_from_queue
2432 and reuse it later. Consider what happens if emit_queue is
2433 called before the return value from protect_from_queue is used.
2435 Expansion of the CALL_EXPR below will call emit_queue before
2436 we are finished emitting RTL for argument setup. So if we are
2437 not careful we could get the wrong value for an argument.
2439 To avoid this problem we go ahead and emit code to copy OBJECT
2440 and SIZE into new pseudos. We can then place those new pseudos
2441 into an RTL_EXPR and use them later, even after a call to
2442 emit_queue.
2444 Note this is not strictly needed for library calls since they
2445 do not call emit_queue before loading their arguments. However,
2446 we may need to have library calls call emit_queue in the future
2447 since failing to do so could cause problems for targets which
2448 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2449 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2451 #ifdef TARGET_MEM_FUNCTIONS
2452 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2453 #else
2454 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2455 TREE_UNSIGNED (integer_type_node));
2456 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2457 #endif
2460 #ifdef TARGET_MEM_FUNCTIONS
2461 /* It is incorrect to use the libcall calling conventions to call
2462 memset in this context.
2464 This could be a user call to memset and the user may wish to
2465 examine the return value from memset.
2467 For targets where libcalls and normal calls have different
2468 conventions for returning pointers, we could end up generating
2469 incorrect code.
2471 So instead of using a libcall sequence we build up a suitable
2472 CALL_EXPR and expand the call in the normal fashion. */
2473 if (fn == NULL_TREE)
2475 tree fntype;
2477 /* This was copied from except.c; I don't know if all this is
2478 necessary in this context or not. */
2479 fn = get_identifier ("memset");
2480 push_obstacks_nochange ();
2481 end_temporary_allocation ();
2482 fntype = build_pointer_type (void_type_node);
2483 fntype = build_function_type (fntype, NULL_TREE);
2484 fn = build_decl (FUNCTION_DECL, fn, fntype);
2485 ggc_add_tree_root (&fn, 1);
2486 DECL_EXTERNAL (fn) = 1;
2487 TREE_PUBLIC (fn) = 1;
2488 DECL_ARTIFICIAL (fn) = 1;
2489 make_decl_rtl (fn, NULL_PTR, 1);
2490 assemble_external (fn);
2491 pop_obstacks ();
2494 /* We need to make an argument list for the function call.
2496 memset has three arguments, the first is a void * address, the
2497 second an integer with the initialization value, and the last is a
2498 size_t byte count for the copy. */
2499 arg_list
2500 = build_tree_list (NULL_TREE,
2501 make_tree (build_pointer_type (void_type_node),
2502 object));
2503 TREE_CHAIN (arg_list)
2504 = build_tree_list (NULL_TREE,
2505 make_tree (integer_type_node, const0_rtx));
2506 TREE_CHAIN (TREE_CHAIN (arg_list))
2507 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2508 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2510 /* Now we have to build up the CALL_EXPR itself. */
2511 call_expr = build1 (ADDR_EXPR,
2512 build_pointer_type (TREE_TYPE (fn)), fn);
2513 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2514 call_expr, arg_list, NULL_TREE);
2515 TREE_SIDE_EFFECTS (call_expr) = 1;
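/* In effect the tree built above is the source-level call
   memset ((void *) OBJECT, 0, SIZE), expanded through the normal call
   machinery so its return-value convention matches a user call.  */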
2517 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2518 #else
2519 emit_library_call (bzero_libfunc, 0,
2520 VOIDmode, 2, object, Pmode, size,
2521 TYPE_MODE (integer_type_node));
2522 #endif
2525 else
2526 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2528 return retval;
2531 /* Generate code to copy Y into X.
2532 Both Y and X must have the same mode, except that
2533 Y can be a constant with VOIDmode.
2534 This mode cannot be BLKmode; use emit_block_move for that.
2536 Return the last instruction emitted. */
2539 emit_move_insn (x, y)
2540 rtx x, y;
2542 enum machine_mode mode = GET_MODE (x);
2544 x = protect_from_queue (x, 1);
2545 y = protect_from_queue (y, 0);
2547 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2548 abort ();
2550 /* Never force constant_p_rtx to memory. */
2551 if (GET_CODE (y) == CONSTANT_P_RTX)
2553 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2554 y = force_const_mem (mode, y);
2556 /* If X or Y are memory references, verify that their addresses are valid
2557 for the machine. */
2558 if (GET_CODE (x) == MEM
2559 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2560 && ! push_operand (x, GET_MODE (x)))
2561 || (flag_force_addr
2562 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2563 x = change_address (x, VOIDmode, XEXP (x, 0));
2565 if (GET_CODE (y) == MEM
2566 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2567 || (flag_force_addr
2568 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2569 y = change_address (y, VOIDmode, XEXP (y, 0));
2571 if (mode == BLKmode)
2572 abort ();
2574 return emit_move_insn_1 (x, y);
2577 /* Low level part of emit_move_insn.
2578 Called just like emit_move_insn, but assumes X and Y
2579 are basically valid. */
2582 emit_move_insn_1 (x, y)
2583 rtx x, y;
2585 enum machine_mode mode = GET_MODE (x);
2586 enum machine_mode submode;
2587 enum mode_class class = GET_MODE_CLASS (mode);
2588 int i;
2590 if (mode >= MAX_MACHINE_MODE)
2591 abort ();
2593 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2594 return
2595 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2597 /* Expand complex moves by moving real part and imag part, if possible. */
2598 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2599 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2600 * BITS_PER_UNIT),
2601 (class == MODE_COMPLEX_INT
2602 ? MODE_INT : MODE_FLOAT),
2603 0))
2604 && (mov_optab->handlers[(int) submode].insn_code
2605 != CODE_FOR_nothing))
2607 /* Don't split destination if it is a stack push. */
2608 int stack = push_operand (x, GET_MODE (x));
2610 /* If this is a stack, push the highpart first, so it
2611 will be in the argument order.
2613 In that case, change_address is used only to convert
2614 the mode, not to change the address. */
2615 if (stack)
2617 /* Note that the real part always precedes the imag part in memory
2618 regardless of machine's endianness. */
2619 #ifdef STACK_GROWS_DOWNWARD
2620 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2621 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2622 gen_imagpart (submode, y)));
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_realpart (submode, y)));
2626 #else
2627 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2628 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2629 gen_realpart (submode, y)));
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_imagpart (submode, y)));
2633 #endif
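/* For example, pushing an SCmode value on a machine whose stack grows
   downward pushes the imaginary part first and the real part second;
   after both pushes the real part therefore sits at the lower address,
   matching the usual real-before-imaginary layout in memory.  */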
2635 else
2637 rtx realpart_x, realpart_y;
2638 rtx imagpart_x, imagpart_y;
2640 /* If this is a complex value with each part being smaller than a
2641 word, the usual calling sequence will likely pack the pieces into
2642 a single register. Unfortunately, SUBREG of hard registers only
2643 deals in terms of words, so we have a problem converting input
2644 arguments to the CONCAT of two registers that is used elsewhere
2645 for complex values. If this is before reload, we can copy it into
2646 memory and reload. FIXME, we should see about using extract and
2647 insert on integer registers, but complex short and complex char
2648 variables should be rarely used. */
2649 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2650 && (reload_in_progress | reload_completed) == 0)
2652 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2653 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2655 if (packed_dest_p || packed_src_p)
2657 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2658 ? MODE_FLOAT : MODE_INT);
2660 enum machine_mode reg_mode =
2661 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2663 if (reg_mode != BLKmode)
2665 rtx mem = assign_stack_temp (reg_mode,
2666 GET_MODE_SIZE (mode), 0);
2668 rtx cmem = change_address (mem, mode, NULL_RTX);
2670 cfun->cannot_inline = "function uses short complex types";
2672 if (packed_dest_p)
2674 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2675 emit_move_insn_1 (cmem, y);
2676 return emit_move_insn_1 (sreg, mem);
2678 else
2680 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2681 emit_move_insn_1 (mem, sreg);
2682 return emit_move_insn_1 (x, cmem);
2688 realpart_x = gen_realpart (submode, x);
2689 realpart_y = gen_realpart (submode, y);
2690 imagpart_x = gen_imagpart (submode, x);
2691 imagpart_y = gen_imagpart (submode, y);
2693 /* Show the output dies here. This is necessary for SUBREGs
2694 of pseudos since we cannot track their lifetimes correctly;
2695 hard regs shouldn't appear here except as return values.
2696 We never want to emit such a clobber after reload. */
2697 if (x != y
2698 && ! (reload_in_progress || reload_completed)
2699 && (GET_CODE (realpart_x) == SUBREG
2700 || GET_CODE (imagpart_x) == SUBREG))
2702 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2705 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2706 (realpart_x, realpart_y));
2707 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2708 (imagpart_x, imagpart_y));
2711 return get_last_insn ();
2714 /* This will handle any multi-word mode that lacks a move_insn pattern.
2715 However, you will get better code if you define such patterns,
2716 even if they must turn into multiple assembler instructions. */
2717 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2719 rtx last_insn = 0;
2720 rtx seq;
2721 int need_clobber;
2723 #ifdef PUSH_ROUNDING
2725 /* If X is a push on the stack, do the push now and replace
2726 X with a reference to the stack pointer. */
2727 if (push_operand (x, GET_MODE (x)))
2729 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2730 x = change_address (x, VOIDmode, stack_pointer_rtx);
2732 #endif
2734 start_sequence ();
2736 need_clobber = 0;
2737 for (i = 0;
2738 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2739 i++)
2741 rtx xpart = operand_subword (x, i, 1, mode);
2742 rtx ypart = operand_subword (y, i, 1, mode);
2744 /* If we can't get a part of Y, put Y into memory if it is a
2745 constant. Otherwise, force it into a register. If we still
2746 can't get a part of Y, abort. */
2747 if (ypart == 0 && CONSTANT_P (y))
2749 y = force_const_mem (mode, y);
2750 ypart = operand_subword (y, i, 1, mode);
2752 else if (ypart == 0)
2753 ypart = operand_subword_force (y, i, mode);
2755 if (xpart == 0 || ypart == 0)
2756 abort ();
2758 need_clobber |= (GET_CODE (xpart) == SUBREG);
2760 last_insn = emit_move_insn (xpart, ypart);
2763 seq = gen_sequence ();
2764 end_sequence ();
2766 /* Show the output dies here. This is necessary for SUBREGs
2767 of pseudos since we cannot track their lifetimes correctly;
2768 hard regs shouldn't appear here except as return values.
2769 We never want to emit such a clobber after reload. */
2770 if (x != y
2771 && ! (reload_in_progress || reload_completed)
2772 && need_clobber != 0)
2774 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2777 emit_insn (seq);
2779 return last_insn;
2781 else
2782 abort ();
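/* Worked example for the multi-word branch above: on a 32-bit target
   with no movdi pattern, a DImode move is emitted as two word_mode
   moves, one per operand_subword, wrapped in a single SEQUENCE; a
   CLOBBER of the destination is emitted first when any destination
   word is a SUBREG, so flow analysis sees the whole value die there.  */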
2785 /* Pushing data onto the stack. */
2787 /* Push a block of length SIZE (perhaps variable)
2788 and return an rtx to address the beginning of the block.
2789 Note that it is not possible for the value returned to be a QUEUED.
2790 The value may be virtual_outgoing_args_rtx.
2792 EXTRA is the number of bytes of padding to push in addition to SIZE.
2793 BELOW nonzero means this padding comes at low addresses;
2794 otherwise, the padding comes at high addresses. */
2797 push_block (size, extra, below)
2798 rtx size;
2799 int extra, below;
2801 register rtx temp;
2803 size = convert_modes (Pmode, ptr_mode, size, 1);
2804 if (CONSTANT_P (size))
2805 anti_adjust_stack (plus_constant (size, extra));
2806 else if (GET_CODE (size) == REG && extra == 0)
2807 anti_adjust_stack (size);
2808 else
2810 rtx temp = copy_to_mode_reg (Pmode, size);
2811 if (extra != 0)
2812 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2813 temp, 0, OPTAB_LIB_WIDEN);
2814 anti_adjust_stack (temp);
2817 #if defined (STACK_GROWS_DOWNWARD) \
2818 || (defined (ARGS_GROW_DOWNWARD) \
2819 && !defined (ACCUMULATE_OUTGOING_ARGS))
2821 /* Return the lowest stack address when STACK or ARGS grow downward and
2822 we are not accumulating outgoing arguments (the c4x port uses such
2823 conventions). */
2824 temp = virtual_outgoing_args_rtx;
2825 if (extra != 0 && below)
2826 temp = plus_constant (temp, extra);
2827 #else
2828 if (GET_CODE (size) == CONST_INT)
2829 temp = plus_constant (virtual_outgoing_args_rtx,
2830 - INTVAL (size) - (below ? 0 : extra));
2831 else if (extra != 0 && !below)
2832 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2833 negate_rtx (Pmode, plus_constant (size, extra)));
2834 else
2835 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2836 negate_rtx (Pmode, size));
2837 #endif
2839 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
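/* Worked example (hypothetical target, stack grows downward, EXTRA == 0):
   push_block (GEN_INT (32), 0, 0) emits anti_adjust_stack (GEN_INT (32))
   and returns a validated copy of virtual_outgoing_args_rtx, the lowest
   address of the new block; targets taking the #else branch above
   instead return virtual_outgoing_args_rtx - 32.  */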
2843 gen_push_operand ()
2845 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2848 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2849 block of SIZE bytes. */
2851 static rtx
2852 get_push_address (size)
2853 int size;
2855 register rtx temp;
2857 if (STACK_PUSH_CODE == POST_DEC)
2858 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2859 else if (STACK_PUSH_CODE == POST_INC)
2860 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2861 else
2862 temp = stack_pointer_rtx;
2864 return copy_to_reg (temp);
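/* For instance, with the usual PRE_DEC push (stack grows downward) the
   stack pointer already addresses the datum just pushed, so the result
   is simply a copy of stack_pointer_rtx; with POST_DEC the datum was
   written at the old stack pointer, i.e. at sp + SIZE, which is what
   the first case above computes.  */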
2867 /* Generate code to push X onto the stack, assuming it has mode MODE and
2868 type TYPE.
2869 MODE is redundant except when X is a CONST_INT (since they don't
2870 carry mode info).
2871 SIZE is an rtx for the size of data to be copied (in bytes),
2872 needed only if X is BLKmode.
2874 ALIGN (in bytes) is maximum alignment we can assume.
2876 If PARTIAL and REG are both nonzero, then copy that many of the first
2877 words of X into registers starting with REG, and push the rest of X.
2878 The amount of space pushed is decreased by PARTIAL words,
2879 rounded *down* to a multiple of PARM_BOUNDARY.
2880 REG must be a hard register in this case.
2881 If REG is zero but PARTIAL is not, take all other actions for an
2882 argument partially in registers, but do not actually load any
2883 registers.
2885 EXTRA is the amount in bytes of extra space to leave next to this arg.
2886 This is ignored if an argument block has already been allocated.
2888 On a machine that lacks real push insns, ARGS_ADDR is the address of
2889 the bottom of the argument block for this call. We use indexing off there
2890 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2891 argument block has not been preallocated.
2893 ARGS_SO_FAR is the size of args previously pushed for this call.
2895 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2896 for arguments passed in registers. If nonzero, it will be the number
2897 of bytes required. */
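/* Illustrative example (hypothetical ABI, UNITS_PER_WORD == 4): a caller
   passing a 12-byte argument with PARTIAL == 2 and REG a hard register
   would have the first 8 bytes loaded into REG and REG+1 (done at the
   very end of this function, after any mem-to-mem copies) and only the
   remaining 4 bytes pushed on the stack.  */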
2899 void
2900 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2901 args_addr, args_so_far, reg_parm_stack_space,
2902 alignment_pad)
2903 register rtx x;
2904 enum machine_mode mode;
2905 tree type;
2906 rtx size;
2907 unsigned int align;
2908 int partial;
2909 rtx reg;
2910 int extra;
2911 rtx args_addr;
2912 rtx args_so_far;
2913 int reg_parm_stack_space;
2914 rtx alignment_pad;
2916 rtx xinner;
2917 enum direction stack_direction
2918 #ifdef STACK_GROWS_DOWNWARD
2919 = downward;
2920 #else
2921 = upward;
2922 #endif
2924 /* Decide where to pad the argument: `downward' for below,
2925 `upward' for above, or `none' for don't pad it.
2926 Default is below for small data on big-endian machines; else above. */
2927 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2929 /* Invert direction if stack is post-update. */
2930 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2931 if (where_pad != none)
2932 where_pad = (where_pad == downward ? upward : downward);
2934 xinner = x = protect_from_queue (x, 0);
2936 if (mode == BLKmode)
2938 /* Copy a block into the stack, entirely or partially. */
2940 register rtx temp;
2941 int used = partial * UNITS_PER_WORD;
2942 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2943 int skip;
2945 if (size == 0)
2946 abort ();
2948 used -= offset;
2950 /* USED is now the # of bytes we need not copy to the stack
2951 because registers will take care of them. */
2953 if (partial != 0)
2954 xinner = change_address (xinner, BLKmode,
2955 plus_constant (XEXP (xinner, 0), used));
2957 /* If the partial register-part of the arg counts in its stack size,
2958 skip the part of stack space corresponding to the registers.
2959 Otherwise, start copying to the beginning of the stack space,
2960 by setting SKIP to 0. */
2961 skip = (reg_parm_stack_space == 0) ? 0 : used;
2963 #ifdef PUSH_ROUNDING
2964 /* Do it with several push insns if that doesn't take lots of insns
2965 and if there is no difficulty with push insns that skip bytes
2966 on the stack for alignment purposes. */
2967 if (args_addr == 0
2968 && GET_CODE (size) == CONST_INT
2969 && skip == 0
2970 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2971 /* Here we avoid the case of a structure whose weak alignment
2972 forces many pushes of a small amount of data,
2973 and such small pushes do rounding that causes trouble. */
2974 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2975 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2976 || PUSH_ROUNDING (align) == align)
2977 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2979 /* Push padding now if padding above and stack grows down,
2980 or if padding below and stack grows up.
2981 But if space already allocated, this has already been done. */
2982 if (extra && args_addr == 0
2983 && where_pad != none && where_pad != stack_direction)
2984 anti_adjust_stack (GEN_INT (extra));
2986 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2987 INTVAL (size) - used, align);
2989 if (current_function_check_memory_usage && ! in_check_memory_usage)
2991 rtx temp;
2993 in_check_memory_usage = 1;
2994 temp = get_push_address (INTVAL(size) - used);
2995 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2996 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2997 temp, Pmode,
2998 XEXP (xinner, 0), Pmode,
2999 GEN_INT (INTVAL(size) - used),
3000 TYPE_MODE (sizetype));
3001 else
3002 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3003 temp, Pmode,
3004 GEN_INT (INTVAL(size) - used),
3005 TYPE_MODE (sizetype),
3006 GEN_INT (MEMORY_USE_RW),
3007 TYPE_MODE (integer_type_node));
3008 in_check_memory_usage = 0;
3011 else
3012 #endif /* PUSH_ROUNDING */
3014 /* Otherwise make space on the stack and copy the data
3015 to the address of that space. */
3017 /* Deduct words put into registers from the size we must copy. */
3018 if (partial != 0)
3020 if (GET_CODE (size) == CONST_INT)
3021 size = GEN_INT (INTVAL (size) - used);
3022 else
3023 size = expand_binop (GET_MODE (size), sub_optab, size,
3024 GEN_INT (used), NULL_RTX, 0,
3025 OPTAB_LIB_WIDEN);
3028 /* Get the address of the stack space.
3029 In this case, we do not deal with EXTRA separately.
3030 A single stack adjust will do. */
3031 if (! args_addr)
3033 temp = push_block (size, extra, where_pad == downward);
3034 extra = 0;
3036 else if (GET_CODE (args_so_far) == CONST_INT)
3037 temp = memory_address (BLKmode,
3038 plus_constant (args_addr,
3039 skip + INTVAL (args_so_far)));
3040 else
3041 temp = memory_address (BLKmode,
3042 plus_constant (gen_rtx_PLUS (Pmode,
3043 args_addr,
3044 args_so_far),
3045 skip));
3046 if (current_function_check_memory_usage && ! in_check_memory_usage)
3048 rtx target;
3050 in_check_memory_usage = 1;
3051 target = copy_to_reg (temp);
3052 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3053 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3054 target, Pmode,
3055 XEXP (xinner, 0), Pmode,
3056 size, TYPE_MODE (sizetype));
3057 else
3058 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3059 target, Pmode,
3060 size, TYPE_MODE (sizetype),
3061 GEN_INT (MEMORY_USE_RW),
3062 TYPE_MODE (integer_type_node));
3063 in_check_memory_usage = 0;
3066 /* TEMP is the address of the block. Copy the data there. */
3067 if (GET_CODE (size) == CONST_INT
3068 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3070 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3071 INTVAL (size), align);
3072 goto ret;
3074 else
3076 rtx opalign = GEN_INT (align);
3077 enum machine_mode mode;
3078 rtx target = gen_rtx_MEM (BLKmode, temp);
3080 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3081 mode != VOIDmode;
3082 mode = GET_MODE_WIDER_MODE (mode))
3084 enum insn_code code = movstr_optab[(int) mode];
3085 insn_operand_predicate_fn pred;
3087 if (code != CODE_FOR_nothing
3088 && ((GET_CODE (size) == CONST_INT
3089 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3090 <= (GET_MODE_MASK (mode) >> 1)))
3091 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3092 && (!(pred = insn_data[(int) code].operand[0].predicate)
3093 || ((*pred) (target, BLKmode)))
3094 && (!(pred = insn_data[(int) code].operand[1].predicate)
3095 || ((*pred) (xinner, BLKmode)))
3096 && (!(pred = insn_data[(int) code].operand[3].predicate)
3097 || ((*pred) (opalign, VOIDmode))))
3099 rtx op2 = convert_to_mode (mode, size, 1);
3100 rtx last = get_last_insn ();
3101 rtx pat;
3103 pred = insn_data[(int) code].operand[2].predicate;
3104 if (pred != 0 && ! (*pred) (op2, mode))
3105 op2 = copy_to_mode_reg (mode, op2);
3107 pat = GEN_FCN ((int) code) (target, xinner,
3108 op2, opalign);
3109 if (pat)
3111 emit_insn (pat);
3112 goto ret;
3114 else
3115 delete_insns_since (last);
3120 #ifndef ACCUMULATE_OUTGOING_ARGS
3121 /* If the source is referenced relative to the stack pointer,
3122 copy it to another register to stabilize it. We do not need
3123 to do this if we know that we won't be changing sp. */
3125 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3126 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3127 temp = copy_to_reg (temp);
3128 #endif
3130 /* Make inhibit_defer_pop nonzero around the library call
3131 to force it to pop the bcopy-arguments right away. */
3132 NO_DEFER_POP;
3133 #ifdef TARGET_MEM_FUNCTIONS
3134 emit_library_call (memcpy_libfunc, 0,
3135 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3136 convert_to_mode (TYPE_MODE (sizetype),
3137 size, TREE_UNSIGNED (sizetype)),
3138 TYPE_MODE (sizetype));
3139 #else
3140 emit_library_call (bcopy_libfunc, 0,
3141 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3142 convert_to_mode (TYPE_MODE (integer_type_node),
3143 size,
3144 TREE_UNSIGNED (integer_type_node)),
3145 TYPE_MODE (integer_type_node));
3146 #endif
3147 OK_DEFER_POP;
3150 else if (partial > 0)
3152 /* Scalar partly in registers. */
3154 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3155 int i;
3156 int not_stack;
3157 /* # words of start of argument
3158 that we must make space for but need not store. */
3159 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3160 int args_offset = INTVAL (args_so_far);
3161 int skip;
3163 /* Push padding now if padding above and stack grows down,
3164 or if padding below and stack grows up.
3165 But if space already allocated, this has already been done. */
3166 if (extra && args_addr == 0
3167 && where_pad != none && where_pad != stack_direction)
3168 anti_adjust_stack (GEN_INT (extra));
3170 /* If we make space by pushing it, we might as well push
3171 the real data. Otherwise, we can leave OFFSET nonzero
3172 and leave the space uninitialized. */
3173 if (args_addr == 0)
3174 offset = 0;
3176 /* Now NOT_STACK gets the number of words that we don't need to
3177 allocate on the stack. */
3178 not_stack = partial - offset;
3180 /* If the partial register-part of the arg counts in its stack size,
3181 skip the part of stack space corresponding to the registers.
3182 Otherwise, start copying to the beginning of the stack space,
3183 by setting SKIP to 0. */
3184 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3186 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3187 x = validize_mem (force_const_mem (mode, x));
3189 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3190 SUBREGs of such registers are not allowed. */
3191 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3192 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3193 x = copy_to_reg (x);
3195 /* Loop over all the words allocated on the stack for this arg. */
3196 /* We can do it by words, because any scalar bigger than a word
3197 has a size a multiple of a word. */
3198 #ifndef PUSH_ARGS_REVERSED
3199 for (i = not_stack; i < size; i++)
3200 #else
3201 for (i = size - 1; i >= not_stack; i--)
3202 #endif
3203 if (i >= not_stack + offset)
3204 emit_push_insn (operand_subword_force (x, i, mode),
3205 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3206 0, args_addr,
3207 GEN_INT (args_offset + ((i - not_stack + skip)
3208 * UNITS_PER_WORD)),
3209 reg_parm_stack_space, alignment_pad);
3211 else
3213 rtx addr;
3214 rtx target = NULL_RTX;
3216 /* Push padding now if padding above and stack grows down,
3217 or if padding below and stack grows up.
3218 But if space already allocated, this has already been done. */
3219 if (extra && args_addr == 0
3220 && where_pad != none && where_pad != stack_direction)
3221 anti_adjust_stack (GEN_INT (extra));
3223 #ifdef PUSH_ROUNDING
3224 if (args_addr == 0)
3225 addr = gen_push_operand ();
3226 else
3227 #endif
3229 if (GET_CODE (args_so_far) == CONST_INT)
3230 addr
3231 = memory_address (mode,
3232 plus_constant (args_addr,
3233 INTVAL (args_so_far)));
3234 else
3235 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3236 args_so_far));
3237 target = addr;
3240 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3242 if (current_function_check_memory_usage && ! in_check_memory_usage)
3244 in_check_memory_usage = 1;
3245 if (target == 0)
3246 target = get_push_address (GET_MODE_SIZE (mode));
3248 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3249 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3250 target, Pmode,
3251 XEXP (x, 0), Pmode,
3252 GEN_INT (GET_MODE_SIZE (mode)),
3253 TYPE_MODE (sizetype));
3254 else
3255 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3256 target, Pmode,
3257 GEN_INT (GET_MODE_SIZE (mode)),
3258 TYPE_MODE (sizetype),
3259 GEN_INT (MEMORY_USE_RW),
3260 TYPE_MODE (integer_type_node));
3261 in_check_memory_usage = 0;
3265 ret:
3266 /* If part should go in registers, copy that part
3267 into the appropriate registers. Do this now, at the end,
3268 since mem-to-mem copies above may do function calls. */
3269 if (partial > 0 && reg != 0)
3271 /* Handle calls that pass values in multiple non-contiguous locations.
3272 The Irix 6 ABI has examples of this. */
3273 if (GET_CODE (reg) == PARALLEL)
3274 emit_group_load (reg, x, -1, align); /* ??? size? */
3275 else
3276 move_block_to_reg (REGNO (reg), x, partial, mode);
3279 if (extra && args_addr == 0 && where_pad == stack_direction)
3280 anti_adjust_stack (GEN_INT (extra));
3282 if (alignment_pad)
3283 anti_adjust_stack (alignment_pad);
3286 /* Expand an assignment that stores the value of FROM into TO.
3287 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3288 (This may contain a QUEUED rtx;
3289 if the value is constant, this rtx is a constant.)
3290 Otherwise, the returned value is NULL_RTX.
3292 SUGGEST_REG is no longer actually used.
3293 It used to mean, copy the value through a register
3294 and return that register, if that is possible.
3295 We now use WANT_VALUE to decide whether to do this. */
3298 expand_assignment (to, from, want_value, suggest_reg)
3299 tree to, from;
3300 int want_value;
3301 int suggest_reg ATTRIBUTE_UNUSED;
3303 register rtx to_rtx = 0;
3304 rtx result;
3306 /* Don't crash if the lhs of the assignment was erroneous. */
3308 if (TREE_CODE (to) == ERROR_MARK)
3310 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3311 return want_value ? result : NULL_RTX;
3314 /* Assignment of a structure component needs special treatment
3315 if the structure component's rtx is not simply a MEM.
3316 Assignment of an array element at a constant index, and assignment of
3317 an array element in an unaligned packed structure field, has the same
3318 problem. */
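/* For instance, C assignments such as "s.f = x", "a[3] = x" or a store
   into a packed structure's unaligned field all take the branch below:
   get_inner_reference decomposes the left-hand side into a base object
   plus a bit position, size and alignment, and store_field emits the
   actual insertion.  */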
3320 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3321 || TREE_CODE (to) == ARRAY_REF)
3323 enum machine_mode mode1;
3324 int bitsize;
3325 int bitpos;
3326 tree offset;
3327 int unsignedp;
3328 int volatilep = 0;
3329 tree tem;
3330 unsigned int alignment;
3332 push_temp_slots ();
3333 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3334 &unsignedp, &volatilep, &alignment);
3336 /* If we are going to use store_bit_field and extract_bit_field,
3337 make sure to_rtx will be safe for multiple use. */
3339 if (mode1 == VOIDmode && want_value)
3340 tem = stabilize_reference (tem);
3342 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3343 if (offset != 0)
3345 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3347 if (GET_CODE (to_rtx) != MEM)
3348 abort ();
3350 if (GET_MODE (offset_rtx) != ptr_mode)
3352 #ifdef POINTERS_EXTEND_UNSIGNED
3353 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3354 #else
3355 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3356 #endif
3359 /* A constant address in TO_RTX can have VOIDmode; we must not try
3360 to call force_reg for that case. Avoid that case. */
3361 if (GET_CODE (to_rtx) == MEM
3362 && GET_MODE (to_rtx) == BLKmode
3363 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3364 && bitsize
3365 && (bitpos % bitsize) == 0
3366 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3367 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3369 rtx temp = change_address (to_rtx, mode1,
3370 plus_constant (XEXP (to_rtx, 0),
3371 (bitpos /
3372 BITS_PER_UNIT)));
3373 if (GET_CODE (XEXP (temp, 0)) == REG)
3374 to_rtx = temp;
3375 else
3376 to_rtx = change_address (to_rtx, mode1,
3377 force_reg (GET_MODE (XEXP (temp, 0)),
3378 XEXP (temp, 0)));
3379 bitpos = 0;
3382 to_rtx = change_address (to_rtx, VOIDmode,
3383 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3384 force_reg (ptr_mode,
3385 offset_rtx)));
3388 if (volatilep)
3390 if (GET_CODE (to_rtx) == MEM)
3392 /* When the offset is zero, to_rtx is the address of the
3393 structure we are storing into, and hence may be shared.
3394 We must make a new MEM before setting the volatile bit. */
3395 if (offset == 0)
3396 to_rtx = copy_rtx (to_rtx);
3398 MEM_VOLATILE_P (to_rtx) = 1;
3400 #if 0 /* This was turned off because, when a field is volatile
3401 in an object which is not volatile, the object may be in a register,
3402 and then we would abort over here. */
3403 else
3404 abort ();
3405 #endif
3408 if (TREE_CODE (to) == COMPONENT_REF
3409 && TREE_READONLY (TREE_OPERAND (to, 1)))
3411 if (offset == 0)
3412 to_rtx = copy_rtx (to_rtx);
3414 RTX_UNCHANGING_P (to_rtx) = 1;
3417 /* Check the access. */
3418 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3420 rtx to_addr;
3421 int size;
3422 int best_mode_size;
3423 enum machine_mode best_mode;
3425 best_mode = get_best_mode (bitsize, bitpos,
3426 TYPE_ALIGN (TREE_TYPE (tem)),
3427 mode1, volatilep);
3428 if (best_mode == VOIDmode)
3429 best_mode = QImode;
3431 best_mode_size = GET_MODE_BITSIZE (best_mode);
3432 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3433 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3434 size *= GET_MODE_SIZE (best_mode);
3436 /* Check the access right of the pointer. */
3437 if (size)
3438 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3439 to_addr, Pmode,
3440 GEN_INT (size), TYPE_MODE (sizetype),
3441 GEN_INT (MEMORY_USE_WO),
3442 TYPE_MODE (integer_type_node));
3445 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3446 (want_value
3447 /* Spurious cast makes HPUX compiler happy. */
3448 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3449 : VOIDmode),
3450 unsignedp,
3451 /* Required alignment of containing datum. */
3452 alignment,
3453 int_size_in_bytes (TREE_TYPE (tem)),
3454 get_alias_set (to));
3455 preserve_temp_slots (result);
3456 free_temp_slots ();
3457 pop_temp_slots ();
3459 /* If the value is meaningful, convert RESULT to the proper mode.
3460 Otherwise, return nothing. */
3461 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3462 TYPE_MODE (TREE_TYPE (from)),
3463 result,
3464 TREE_UNSIGNED (TREE_TYPE (to)))
3465 : NULL_RTX);
3468 /* If the rhs is a function call and its value is not an aggregate,
3469 call the function before we start to compute the lhs.
3470 This is needed for correct code for cases such as
3471 val = setjmp (buf) on machines where reference to val
3472 requires loading up part of an address in a separate insn.
3474 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3475 a promoted variable where the zero- or sign- extension needs to be done.
3476 Handling this in the normal way is safe because no computation is done
3477 before the call. */
3478 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3479 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3480 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3482 rtx value;
3484 push_temp_slots ();
3485 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3486 if (to_rtx == 0)
3487 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3489 /* Handle calls that return values in multiple non-contiguous locations.
3490 The Irix 6 ABI has examples of this. */
3491 if (GET_CODE (to_rtx) == PARALLEL)
3492 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3493 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3494 else if (GET_MODE (to_rtx) == BLKmode)
3495 emit_block_move (to_rtx, value, expr_size (from),
3496 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3497 else
3499 #ifdef POINTERS_EXTEND_UNSIGNED
3500 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3501 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3502 value = convert_memory_address (GET_MODE (to_rtx), value);
3503 #endif
3504 emit_move_insn (to_rtx, value);
3506 preserve_temp_slots (to_rtx);
3507 free_temp_slots ();
3508 pop_temp_slots ();
3509 return want_value ? to_rtx : NULL_RTX;
3512 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3513 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3515 if (to_rtx == 0)
3517 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3518 if (GET_CODE (to_rtx) == MEM)
3519 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3522 /* Don't move directly into a return register. */
3523 if (TREE_CODE (to) == RESULT_DECL
3524 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3526 rtx temp;
3528 push_temp_slots ();
3529 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3531 if (GET_CODE (to_rtx) == PARALLEL)
3532 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3533 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3534 else
3535 emit_move_insn (to_rtx, temp);
3537 preserve_temp_slots (to_rtx);
3538 free_temp_slots ();
3539 pop_temp_slots ();
3540 return want_value ? to_rtx : NULL_RTX;
3543 /* In case we are returning the contents of an object which overlaps
3544 the place the value is being stored, use a safe function when copying
3545 a value through a pointer into a structure value return block. */
3546 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3547 && current_function_returns_struct
3548 && !current_function_returns_pcc_struct)
3550 rtx from_rtx, size;
3552 push_temp_slots ();
3553 size = expr_size (from);
3554 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3555 EXPAND_MEMORY_USE_DONT);
3557 /* Copy the rights of the bitmap. */
3558 if (current_function_check_memory_usage)
3559 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3560 XEXP (to_rtx, 0), Pmode,
3561 XEXP (from_rtx, 0), Pmode,
3562 convert_to_mode (TYPE_MODE (sizetype),
3563 size, TREE_UNSIGNED (sizetype)),
3564 TYPE_MODE (sizetype));
3566 #ifdef TARGET_MEM_FUNCTIONS
3567 emit_library_call (memcpy_libfunc, 0,
3568 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3569 XEXP (from_rtx, 0), Pmode,
3570 convert_to_mode (TYPE_MODE (sizetype),
3571 size, TREE_UNSIGNED (sizetype)),
3572 TYPE_MODE (sizetype));
3573 #else
3574 emit_library_call (bcopy_libfunc, 0,
3575 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3576 XEXP (to_rtx, 0), Pmode,
3577 convert_to_mode (TYPE_MODE (integer_type_node),
3578 size, TREE_UNSIGNED (integer_type_node)),
3579 TYPE_MODE (integer_type_node));
3580 #endif
3582 preserve_temp_slots (to_rtx);
3583 free_temp_slots ();
3584 pop_temp_slots ();
3585 return want_value ? to_rtx : NULL_RTX;
3588 /* Compute FROM and store the value in the rtx we got. */
3590 push_temp_slots ();
3591 result = store_expr (from, to_rtx, want_value);
3592 preserve_temp_slots (result);
3593 free_temp_slots ();
3594 pop_temp_slots ();
3595 return want_value ? result : NULL_RTX;
3598 /* Generate code for computing expression EXP,
3599 and storing the value into TARGET.
3600 TARGET may contain a QUEUED rtx.
3602 If WANT_VALUE is nonzero, return a copy of the value
3603 not in TARGET, so that we can be sure to use the proper
3604 value in a containing expression even if TARGET has something
3605 else stored in it. If possible, we copy the value through a pseudo
3606 and return that pseudo. Or, if the value is constant, we try to
3607 return the constant. In some cases, we return a pseudo
3608 copied *from* TARGET.
3610 If the mode is BLKmode then we may return TARGET itself.
3611 It turns out that in BLKmode it doesn't cause a problem,
3612 because C has no operators that could combine two different
3613 assignments into the same BLKmode object with different values
3614 with no sequence point. Will other languages need this to
3615 be more thorough?
3617 If WANT_VALUE is 0, we return NULL, to make sure
3618 to catch quickly any cases where the caller uses the value
3619 and fails to set WANT_VALUE. */
3622 store_expr (exp, target, want_value)
3623 register tree exp;
3624 register rtx target;
3625 int want_value;
3627 register rtx temp;
3628 int dont_return_target = 0;
3630 if (TREE_CODE (exp) == COMPOUND_EXPR)
3632 /* Perform first part of compound expression, then assign from second
3633 part. */
3634 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3635 emit_queue ();
3636 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3638 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3640 /* For conditional expression, get safe form of the target. Then
3641 test the condition, doing the appropriate assignment on either
3642 side. This avoids the creation of unnecessary temporaries.
3643 For non-BLKmode, it is more efficient not to do this. */
3645 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3647 emit_queue ();
3648 target = protect_from_queue (target, 1);
3650 do_pending_stack_adjust ();
3651 NO_DEFER_POP;
3652 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3653 start_cleanup_deferral ();
3654 store_expr (TREE_OPERAND (exp, 1), target, 0);
3655 end_cleanup_deferral ();
3656 emit_queue ();
3657 emit_jump_insn (gen_jump (lab2));
3658 emit_barrier ();
3659 emit_label (lab1);
3660 start_cleanup_deferral ();
3661 store_expr (TREE_OPERAND (exp, 2), target, 0);
3662 end_cleanup_deferral ();
3663 emit_queue ();
3664 emit_label (lab2);
3665 OK_DEFER_POP;
3667 return want_value ? target : NULL_RTX;
3669 else if (queued_subexp_p (target))
3670 /* If target contains a postincrement, let's not risk
3671 using it as the place to generate the rhs. */
3673 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3675 /* Expand EXP into a new pseudo. */
3676 temp = gen_reg_rtx (GET_MODE (target));
3677 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3679 else
3680 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3682 /* If target is volatile, ANSI requires accessing the value
3683 *from* the target, if it is accessed. So make that happen.
3684 In no case return the target itself. */
3685 if (! MEM_VOLATILE_P (target) && want_value)
3686 dont_return_target = 1;
3688 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3689 && GET_MODE (target) != BLKmode)
3690 /* If target is in memory and caller wants value in a register instead,
3691 arrange that. Pass TARGET as target for expand_expr so that,
3692 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3693 We know expand_expr will not use the target in that case.
3694 Don't do this if TARGET is volatile because we are supposed
3695 to write it and then read it. */
3697 temp = expand_expr (exp, target, GET_MODE (target), 0);
3698 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3699 temp = copy_to_reg (temp);
3700 dont_return_target = 1;
3702 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3703 /* If this is a scalar in a register that is stored in a wider mode
3704 than the declared mode, compute the result into its declared mode
3705 and then convert to the wider mode. Our value is the computed
3706 expression. */
3708 /* If we don't want a value, we can do the conversion inside EXP,
3709 which will often result in some optimizations. Do the conversion
3710 in two steps: first change the signedness, if needed, then
3711 the extend. But don't do this if the type of EXP is a subtype
3712 of something else since then the conversion might involve
3713 more than just converting modes. */
3714 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3715 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3717 if (TREE_UNSIGNED (TREE_TYPE (exp))
3718 != SUBREG_PROMOTED_UNSIGNED_P (target))
3719 exp
3720 = convert
3721 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3722 TREE_TYPE (exp)),
3723 exp);
3725 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3726 SUBREG_PROMOTED_UNSIGNED_P (target)),
3727 exp);
3730 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3732 /* If TEMP is a volatile MEM and we want a result value, make
3733 the access now so it gets done only once. Likewise if
3734 it contains TARGET. */
3735 if (GET_CODE (temp) == MEM && want_value
3736 && (MEM_VOLATILE_P (temp)
3737 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3738 temp = copy_to_reg (temp);
3740 /* If TEMP is a VOIDmode constant, use convert_modes to make
3741 sure that we properly convert it. */
3742 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3743 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3744 TYPE_MODE (TREE_TYPE (exp)), temp,
3745 SUBREG_PROMOTED_UNSIGNED_P (target));
3747 convert_move (SUBREG_REG (target), temp,
3748 SUBREG_PROMOTED_UNSIGNED_P (target));
3750 /* If we promoted a constant, change the mode back down to match
3751 target. Otherwise, the caller might get confused by a result whose
3752 mode is larger than expected. */
3754 if (want_value && GET_MODE (temp) != GET_MODE (target)
3755 && GET_MODE (temp) != VOIDmode)
3757 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3758 SUBREG_PROMOTED_VAR_P (temp) = 1;
3759 SUBREG_PROMOTED_UNSIGNED_P (temp)
3760 = SUBREG_PROMOTED_UNSIGNED_P (target);
3763 return want_value ? temp : NULL_RTX;
3765 else
3767 temp = expand_expr (exp, target, GET_MODE (target), 0);
3768 /* Return TARGET if it's a specified hardware register.
3769 If TARGET is a volatile mem ref, either return TARGET
3770 or return a reg copied *from* TARGET; ANSI requires this.
3772 Otherwise, if TEMP is not TARGET, return TEMP
3773 if it is constant (for efficiency),
3774 or if we really want the correct value. */
3775 if (!(target && GET_CODE (target) == REG
3776 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3777 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3778 && ! rtx_equal_p (temp, target)
3779 && (CONSTANT_P (temp) || want_value))
3780 dont_return_target = 1;
3783 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3784 the same as that of TARGET, adjust the constant. This is needed, for
3785 example, in case it is a CONST_DOUBLE and we want only a word-sized
3786 value. */
3787 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3788 && TREE_CODE (exp) != ERROR_MARK
3789 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3790 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3791 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3793 if (current_function_check_memory_usage
3794 && GET_CODE (target) == MEM
3795 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3797 if (GET_CODE (temp) == MEM)
3798 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3799 XEXP (target, 0), Pmode,
3800 XEXP (temp, 0), Pmode,
3801 expr_size (exp), TYPE_MODE (sizetype));
3802 else
3803 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3804 XEXP (target, 0), Pmode,
3805 expr_size (exp), TYPE_MODE (sizetype),
3806 GEN_INT (MEMORY_USE_WO),
3807 TYPE_MODE (integer_type_node));
3810 /* If value was not generated in the target, store it there.
3811 Convert the value to TARGET's type first if necessary. */
3812 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3813 one or both of them are volatile memory refs, we have to distinguish
3814 two cases:
3815 - expand_expr has used TARGET. In this case, we must not generate
3816 another copy. This can be detected by TARGET being equal according
3817 to == .
3818 - expand_expr has not used TARGET - that means that the source just
3819 happens to have the same RTX form. Since temp will have been created
3820 by expand_expr, it will compare unequal according to == .
3821 We must generate a copy in this case, to reach the correct number
3822 of volatile memory references. */
3824 if ((! rtx_equal_p (temp, target)
3825 || (temp != target && (side_effects_p (temp)
3826 || side_effects_p (target))))
3827 && TREE_CODE (exp) != ERROR_MARK)
3829 target = protect_from_queue (target, 1);
3830 if (GET_MODE (temp) != GET_MODE (target)
3831 && GET_MODE (temp) != VOIDmode)
3833 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3834 if (dont_return_target)
3836 /* In this case, we will return TEMP,
3837 so make sure it has the proper mode.
3838 But don't forget to store the value into TARGET. */
3839 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3840 emit_move_insn (target, temp);
3842 else
3843 convert_move (target, temp, unsignedp);
3846 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3848 /* Handle copying a string constant into an array.
3849 The string constant may be shorter than the array.
3850 So copy just the string's actual length, and clear the rest. */
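/* Worked example (illustrative; the variable name is hypothetical): for
   `char buf[8] = "abc";' the STRING_CST is 4 bytes long, including the
   terminating NUL, so the block move below copies those 4 bytes and the
   remaining 4 bytes of BUF are cleared.  */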
3851 rtx size;
3852 rtx addr;
3854 /* Get the size of the data type of the string,
3855 which is actually the size of the target. */
3856 size = expr_size (exp);
3857 if (GET_CODE (size) == CONST_INT
3858 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3859 emit_block_move (target, temp, size,
3860 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3861 else
3863 /* Compute the size of the data to copy from the string. */
3864 tree copy_size
3865 = size_binop (MIN_EXPR,
3866 make_tree (sizetype, size),
3867 convert (sizetype,
3868 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3869 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3870 VOIDmode, 0);
3871 rtx label = 0;
3873 /* Copy that much. */
3874 emit_block_move (target, temp, copy_size_rtx,
3875 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3877 /* Figure out how much is left in TARGET that we have to clear.
3878 Do all calculations in ptr_mode. */
3880 addr = XEXP (target, 0);
3881 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3883 if (GET_CODE (copy_size_rtx) == CONST_INT)
3885 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3886 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3888 else
3890 addr = force_reg (ptr_mode, addr);
3891 addr = expand_binop (ptr_mode, add_optab, addr,
3892 copy_size_rtx, NULL_RTX, 0,
3893 OPTAB_LIB_WIDEN);
3895 size = expand_binop (ptr_mode, sub_optab, size,
3896 copy_size_rtx, NULL_RTX, 0,
3897 OPTAB_LIB_WIDEN);
3899 label = gen_label_rtx ();
3900 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3901 GET_MODE (size), 0, 0, label);
3904 if (size != const0_rtx)
3906 /* Be sure we can write on ADDR. */
3907 if (current_function_check_memory_usage)
3908 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3909 addr, Pmode,
3910 size, TYPE_MODE (sizetype),
3911 GEN_INT (MEMORY_USE_WO),
3912 TYPE_MODE (integer_type_node));
3913 #ifdef TARGET_MEM_FUNCTIONS
3914 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3915 addr, ptr_mode,
3916 const0_rtx, TYPE_MODE (integer_type_node),
3917 convert_to_mode (TYPE_MODE (sizetype),
3918 size,
3919 TREE_UNSIGNED (sizetype)),
3920 TYPE_MODE (sizetype));
3921 #else
3922 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3923 addr, ptr_mode,
3924 convert_to_mode (TYPE_MODE (integer_type_node),
3925 size,
3926 TREE_UNSIGNED (integer_type_node)),
3927 TYPE_MODE (integer_type_node));
3928 #endif
3931 if (label)
3932 emit_label (label);
3935 /* Handle calls that return values in multiple non-contiguous locations.
3936 The Irix 6 ABI has examples of this. */
3937 else if (GET_CODE (target) == PARALLEL)
3938 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3939 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3940 else if (GET_MODE (temp) == BLKmode)
3941 emit_block_move (target, temp, expr_size (exp),
3942 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3943 else
3944 emit_move_insn (target, temp);
3947 /* If we don't want a value, return NULL_RTX. */
3948 if (! want_value)
3949 return NULL_RTX;
3951 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3952 ??? The latter test doesn't seem to make sense. */
3953 else if (dont_return_target && GET_CODE (temp) != MEM)
3954 return temp;
3956 /* Return TARGET itself if it is a hard register. */
3957 else if (want_value && GET_MODE (target) != BLKmode
3958 && ! (GET_CODE (target) == REG
3959 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3960 return copy_to_reg (target);
3962 else
3963 return target;
3966 /* Return 1 if EXP just contains zeros. */
3968 static int
3969 is_zeros_p (exp)
3970 tree exp;
3972 tree elt;
3974 switch (TREE_CODE (exp))
3976 case CONVERT_EXPR:
3977 case NOP_EXPR:
3978 case NON_LVALUE_EXPR:
3979 return is_zeros_p (TREE_OPERAND (exp, 0));
3981 case INTEGER_CST:
3982 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3984 case COMPLEX_CST:
3985 return
3986 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3988 case REAL_CST:
3989 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3991 case CONSTRUCTOR:
3992 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3993 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3994 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3995 if (! is_zeros_p (TREE_VALUE (elt)))
3996 return 0;
3998 return 1;
4000 default:
4001 return 0;
4005 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4007 static int
4008 mostly_zeros_p (exp)
4009 tree exp;
4011 if (TREE_CODE (exp) == CONSTRUCTOR)
4013 int elts = 0, zeros = 0;
4014 tree elt = CONSTRUCTOR_ELTS (exp);
4015 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4017 /* If there are no ranges of true bits, it is all zero. */
4018 return elt == NULL_TREE;
4020 for (; elt; elt = TREE_CHAIN (elt))
4022 /* We do not handle the case where the index is a RANGE_EXPR,
4023 so the statistic will be somewhat inaccurate.
4024 We do make a more accurate count in store_constructor itself,
4025 and since this function is only used for nested array elements,
4026 this should be close enough. */
4027 if (mostly_zeros_p (TREE_VALUE (elt)))
4028 zeros++;
4029 elts++;
4032 return 4 * zeros >= 3 * elts;
4035 return is_zeros_p (exp);
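/* Illustrative example of the heuristic above (the initializer is
   hypothetical): a constructor for `int v[4] = {0, 7, 0, 0};' has
   ELTS == 4 and ZEROS == 3, so 4 * 3 >= 3 * 4 holds and the initializer
   counts as mostly zeros; store_constructor will then prefer to clear
   the whole object first and store only the nonzero element.  */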
4038 /* Helper function for store_constructor.
4039 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4040 TYPE is the type of the CONSTRUCTOR, not the element type.
4041 ALIGN and CLEARED are as for store_constructor.
4043 This provides a recursive shortcut back to store_constructor when it isn't
4044 necessary to go through store_field. This is so that we can pass through
4045 the cleared field to let store_constructor know that we may not have to
4046 clear a substructure if the outer structure has already been cleared. */
4048 static void
4049 store_constructor_field (target, bitsize, bitpos,
4050 mode, exp, type, align, cleared)
4051 rtx target;
4052 int bitsize, bitpos;
4053 enum machine_mode mode;
4054 tree exp, type;
4055 unsigned int align;
4056 int cleared;
4058 if (TREE_CODE (exp) == CONSTRUCTOR
4059 && bitpos % BITS_PER_UNIT == 0
4060 /* If we have a non-zero bitpos for a register target, then we just
4061 let store_field do the bitfield handling. This is unlikely to
4062 generate unnecessary clear instructions anyway. */
4063 && (bitpos == 0 || GET_CODE (target) == MEM))
4065 if (bitpos != 0)
4066 target
4067 = change_address (target,
4068 GET_MODE (target) == BLKmode
4069 || 0 != (bitpos
4070 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4071 ? BLKmode : VOIDmode,
4072 plus_constant (XEXP (target, 0),
4073 bitpos / BITS_PER_UNIT));
4074 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4076 else
4077 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4078 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4079 int_size_in_bytes (type), 0);
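/* Illustrative note (the initializer is hypothetical): for a nested
   aggregate such as `struct { struct { int a, b; } in; } x = { { 1, 2 } };'
   the inner CONSTRUCTOR starts on a byte boundary, so
   store_constructor_field recurses straight into store_constructor
   (propagating CLEARED) instead of going through store_field.  */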
4082 /* Store the value of constructor EXP into the rtx TARGET.
4083 TARGET is either a REG or a MEM.
4084 ALIGN is the maximum known alignment for TARGET, in bits.
4085 CLEARED is true if TARGET is known to have been zero'd.
4086 SIZE is the number of bytes of TARGET we are allowed to modify: this
4087 may not be the same as the size of EXP if we are assigning to a field
4088 which has been packed to exclude padding bits. */
4090 static void
4091 store_constructor (exp, target, align, cleared, size)
4092 tree exp;
4093 rtx target;
4094 unsigned int align;
4095 int cleared;
4096 int size;
4098 tree type = TREE_TYPE (exp);
4099 #ifdef WORD_REGISTER_OPERATIONS
4100 rtx exp_size = expr_size (exp);
4101 #endif
4103 /* We know our target cannot conflict, since safe_from_p has been called. */
4104 #if 0
4105 /* Don't try copying piece by piece into a hard register
4106 since that is vulnerable to being clobbered by EXP.
4107 Instead, construct in a pseudo register and then copy it all. */
4108 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4110 rtx temp = gen_reg_rtx (GET_MODE (target));
4111 store_constructor (exp, temp, align, cleared, size);
4112 emit_move_insn (target, temp);
4113 return;
4115 #endif
4117 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4118 || TREE_CODE (type) == QUAL_UNION_TYPE)
4120 register tree elt;
4122 /* Inform later passes that the whole union value is dead. */
4123 if ((TREE_CODE (type) == UNION_TYPE
4124 || TREE_CODE (type) == QUAL_UNION_TYPE)
4125 && ! cleared)
4127 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4129 /* If the constructor is empty, clear the union. */
4130 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4131 clear_storage (target, expr_size (exp),
4132 TYPE_ALIGN (type) / BITS_PER_UNIT);
4135 /* If we are building a static constructor into a register,
4136 set the initial value as zero so we can fold the value into
4137 a constant. But if more than one register is involved,
4138 this probably loses. */
4139 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4140 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4142 if (! cleared)
4143 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4145 cleared = 1;
4148 /* If the constructor has fewer fields than the structure
4149 or if we are initializing the structure to mostly zeros,
4150 clear the whole structure first. */
4151 else if (size > 0
4152 && ((list_length (CONSTRUCTOR_ELTS (exp))
4153 != list_length (TYPE_FIELDS (type)))
4154 || mostly_zeros_p (exp)))
4156 if (! cleared)
4157 clear_storage (target, GEN_INT (size),
4158 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4160 cleared = 1;
4162 else if (! cleared)
4163 /* Inform later passes that the old value is dead. */
4164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4166 /* Store each element of the constructor into
4167 the corresponding field of TARGET. */
4169 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4171 register tree field = TREE_PURPOSE (elt);
4172 #ifdef WORD_REGISTER_OPERATIONS
4173 tree value = TREE_VALUE (elt);
4174 #endif
4175 register enum machine_mode mode;
4176 int bitsize;
4177 int bitpos = 0;
4178 int unsignedp;
4179 tree pos, constant = 0, offset = 0;
4180 rtx to_rtx = target;
4182 /* Just ignore missing fields.
4183 We cleared the whole structure, above,
4184 if any fields are missing. */
4185 if (field == 0)
4186 continue;
4188 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4189 continue;
4191 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4192 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4193 else
4194 bitsize = -1;
4196 unsignedp = TREE_UNSIGNED (field);
4197 mode = DECL_MODE (field);
4198 if (DECL_BIT_FIELD (field))
4199 mode = VOIDmode;
4201 pos = DECL_FIELD_BITPOS (field);
4202 if (TREE_CODE (pos) == INTEGER_CST)
4203 constant = pos;
4204 else if (TREE_CODE (pos) == PLUS_EXPR
4205 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4206 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4207 else
4208 offset = pos;
4210 if (constant)
4211 bitpos = TREE_INT_CST_LOW (constant);
4213 if (offset)
4215 rtx offset_rtx;
4217 if (contains_placeholder_p (offset))
4218 offset = build (WITH_RECORD_EXPR, sizetype,
4219 offset, make_tree (TREE_TYPE (exp), target));
4221 offset = size_binop (EXACT_DIV_EXPR, offset,
4222 size_int (BITS_PER_UNIT));
4224 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4225 if (GET_CODE (to_rtx) != MEM)
4226 abort ();
4228 if (GET_MODE (offset_rtx) != ptr_mode)
4230 #ifdef POINTERS_EXTEND_UNSIGNED
4231 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4232 #else
4233 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4234 #endif
4237 to_rtx
4238 = change_address (to_rtx, VOIDmode,
4239 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4240 force_reg (ptr_mode,
4241 offset_rtx)));
4244 if (TREE_READONLY (field))
4246 if (GET_CODE (to_rtx) == MEM)
4247 to_rtx = copy_rtx (to_rtx);
4249 RTX_UNCHANGING_P (to_rtx) = 1;
4252 #ifdef WORD_REGISTER_OPERATIONS
4253 /* If this initializes a field that is smaller than a word, at the
4254 start of a word, try to widen it to a full word.
4255 This special case allows us to output C++ member function
4256 initializations in a form that the optimizers can understand. */
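/* Illustrative example (hypothetical aggregate, assuming a 32-bit word):
   initializing `struct { char a, b, c, d; } s = { 1, 0, 0, 0 };' held in
   a register can widen the byte-sized store of A into a full word-mode
   constant (shifted into place on big-endian targets), so later
   optimizers see one word store rather than a narrow bit-field
   insertion.  */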
4257 if (constant
4258 && GET_CODE (target) == REG
4259 && bitsize < BITS_PER_WORD
4260 && bitpos % BITS_PER_WORD == 0
4261 && GET_MODE_CLASS (mode) == MODE_INT
4262 && TREE_CODE (value) == INTEGER_CST
4263 && GET_CODE (exp_size) == CONST_INT
4264 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4266 tree type = TREE_TYPE (value);
4267 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4269 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4270 value = convert (type, value);
4272 if (BYTES_BIG_ENDIAN)
4273 value
4274 = fold (build (LSHIFT_EXPR, type, value,
4275 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4276 bitsize = BITS_PER_WORD;
4277 mode = word_mode;
4279 #endif
4280 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4281 TREE_VALUE (elt), type,
4282 MIN (align,
4283 DECL_ALIGN (TREE_PURPOSE (elt))),
4284 cleared);
4287 else if (TREE_CODE (type) == ARRAY_TYPE)
4289 register tree elt;
4290 register int i;
4291 int need_to_clear;
4292 tree domain = TYPE_DOMAIN (type);
4293 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4294 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4295 tree elttype = TREE_TYPE (type);
4297 /* If the constructor has fewer elements than the array,
4298 clear the whole array first. Similarly if this is
4299 a static constructor of a non-BLKmode object. */
4300 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4301 need_to_clear = 1;
4302 else
4304 HOST_WIDE_INT count = 0, zero_count = 0;
4305 need_to_clear = 0;
4306 /* This loop is a more accurate version of the loop in
4307 mostly_zeros_p (it handles RANGE_EXPR in an index).
4308 It is also needed to check for missing elements. */
4309 for (elt = CONSTRUCTOR_ELTS (exp);
4310 elt != NULL_TREE;
4311 elt = TREE_CHAIN (elt))
4313 tree index = TREE_PURPOSE (elt);
4314 HOST_WIDE_INT this_node_count;
4315 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4317 tree lo_index = TREE_OPERAND (index, 0);
4318 tree hi_index = TREE_OPERAND (index, 1);
4319 if (TREE_CODE (lo_index) != INTEGER_CST
4320 || TREE_CODE (hi_index) != INTEGER_CST)
4322 need_to_clear = 1;
4323 break;
4325 this_node_count = TREE_INT_CST_LOW (hi_index)
4326 - TREE_INT_CST_LOW (lo_index) + 1;
4328 else
4329 this_node_count = 1;
4330 count += this_node_count;
4331 if (mostly_zeros_p (TREE_VALUE (elt)))
4332 zero_count += this_node_count;
4334 /* Clear the entire array first if there are any missing elements,
4335 or if the incidence of zero elements is >= 75%. */
4336 if (count < maxelt - minelt + 1
4337 || 4 * zero_count >= 3 * count)
4338 need_to_clear = 1;
4340 if (need_to_clear && size > 0)
4342 if (! cleared)
4343 clear_storage (target, GEN_INT (size),
4344 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4345 cleared = 1;
4347 else
4348 /* Inform later passes that the old value is dead. */
4349 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4351 /* Store each element of the constructor into
4352 the corresponding element of TARGET, determined
4353 by counting the elements. */
4354 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4355 elt;
4356 elt = TREE_CHAIN (elt), i++)
4358 register enum machine_mode mode;
4359 int bitsize;
4360 int bitpos;
4361 int unsignedp;
4362 tree value = TREE_VALUE (elt);
4363 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4364 tree index = TREE_PURPOSE (elt);
4365 rtx xtarget = target;
4367 if (cleared && is_zeros_p (value))
4368 continue;
4370 unsignedp = TREE_UNSIGNED (elttype);
4371 mode = TYPE_MODE (elttype);
4372 if (mode == BLKmode)
4374 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4375 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4376 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4377 else
4378 bitsize = -1;
4380 else
4381 bitsize = GET_MODE_BITSIZE (mode);
4383 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4385 tree lo_index = TREE_OPERAND (index, 0);
4386 tree hi_index = TREE_OPERAND (index, 1);
4387 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4388 struct nesting *loop;
4389 HOST_WIDE_INT lo, hi, count;
4390 tree position;
4392 /* If the range is constant and "small", unroll the loop. */
4393 if (TREE_CODE (lo_index) == INTEGER_CST
4394 && TREE_CODE (hi_index) == INTEGER_CST
4395 && (lo = TREE_INT_CST_LOW (lo_index),
4396 hi = TREE_INT_CST_LOW (hi_index),
4397 count = hi - lo + 1,
4398 (GET_CODE (target) != MEM
4399 || count <= 2
4400 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4401 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4402 <= 40 * 8))))
4404 lo -= minelt; hi -= minelt;
4405 for (; lo <= hi; lo++)
4407 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4408 store_constructor_field (target, bitsize, bitpos, mode,
4409 value, type, align, cleared);
4412 else
4414 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4415 loop_top = gen_label_rtx ();
4416 loop_end = gen_label_rtx ();
4418 unsignedp = TREE_UNSIGNED (domain);
4420 index = build_decl (VAR_DECL, NULL_TREE, domain);
4422 DECL_RTL (index) = index_r
4423 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4424 &unsignedp, 0));
4426 if (TREE_CODE (value) == SAVE_EXPR
4427 && SAVE_EXPR_RTL (value) == 0)
4429 /* Make sure value gets expanded once before the
4430 loop. */
4431 expand_expr (value, const0_rtx, VOIDmode, 0);
4432 emit_queue ();
4434 store_expr (lo_index, index_r, 0);
4435 loop = expand_start_loop (0);
4437 /* Assign value to element index. */
4438 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4439 size_int (BITS_PER_UNIT));
4440 position = size_binop (MULT_EXPR,
4441 size_binop (MINUS_EXPR, index,
4442 TYPE_MIN_VALUE (domain)),
4443 position);
4444 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4445 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4446 xtarget = change_address (target, mode, addr);
4447 if (TREE_CODE (value) == CONSTRUCTOR)
4448 store_constructor (value, xtarget, align, cleared,
4449 bitsize / BITS_PER_UNIT);
4450 else
4451 store_expr (value, xtarget, 0);
4453 expand_exit_loop_if_false (loop,
4454 build (LT_EXPR, integer_type_node,
4455 index, hi_index));
4457 expand_increment (build (PREINCREMENT_EXPR,
4458 TREE_TYPE (index),
4459 index, integer_one_node), 0, 0);
4460 expand_end_loop ();
4461 emit_label (loop_end);
4463 /* Needed by stupid register allocation, to extend the
4464 lifetime of pseudo-regs used by target past the end
4465 of the loop. */
4466 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4469 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4470 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4472 rtx pos_rtx, addr;
4473 tree position;
4475 if (index == 0)
4476 index = size_int (i);
4478 if (minelt)
4479 index = size_binop (MINUS_EXPR, index,
4480 TYPE_MIN_VALUE (domain));
4481 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4482 size_int (BITS_PER_UNIT));
4483 position = size_binop (MULT_EXPR, index, position);
4484 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4485 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4486 xtarget = change_address (target, mode, addr);
4487 store_expr (value, xtarget, 0);
4489 else
4491 if (index != 0)
4492 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4493 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4494 else
4495 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4496 store_constructor_field (target, bitsize, bitpos, mode, value,
4497 type, align, cleared);
4501 /* set constructor assignments */
4502 else if (TREE_CODE (type) == SET_TYPE)
4504 tree elt = CONSTRUCTOR_ELTS (exp);
4505 int nbytes = int_size_in_bytes (type), nbits;
4506 tree domain = TYPE_DOMAIN (type);
4507 tree domain_min, domain_max, bitlength;
4509 /* The default implementation strategy is to extract the constant
4510 parts of the constructor, use that to initialize the target,
4511 and then "or" in whatever non-constant ranges we need in addition.
4513 If a large set is all zero or all ones, it is
4514 probably better to set it using memset (if available) or bzero.
4515 Also, if a large set has just a single range, it may also be
4516 better to first clear the whole set (using
4517 bzero/memset) and then set the bits we want. */
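/* Illustrative example (hypothetical Pascal/Chill-style set): for a
   constructor like `[ 3, 10..12 ]' over a domain of 0..31, the constant
   bits are packed into words by the loop below, while any non-constant
   ranges are or'ed in afterwards via __setbits (or memset when the
   range is byte-aligned).  */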
4519 /* Check for all zeros. */
4520 if (elt == NULL_TREE && size > 0)
4522 if (!cleared)
4523 clear_storage (target, GEN_INT (size),
4524 TYPE_ALIGN (type) / BITS_PER_UNIT);
4525 return;
4528 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4529 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4530 bitlength = size_binop (PLUS_EXPR,
4531 size_binop (MINUS_EXPR, domain_max, domain_min),
4532 size_one_node);
4534 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4535 abort ();
4536 nbits = TREE_INT_CST_LOW (bitlength);
4538 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4539 are "complicated" (more than one range), initialize (the
4540 constant parts) by copying from a constant. */
4541 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4542 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4544 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4545 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4546 char *bit_buffer = (char *) alloca (nbits);
4547 HOST_WIDE_INT word = 0;
4548 int bit_pos = 0;
4549 int ibit = 0;
4550 int offset = 0; /* In bytes from beginning of set. */
4551 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4552 for (;;)
4554 if (bit_buffer[ibit])
4556 if (BYTES_BIG_ENDIAN)
4557 word |= (1 << (set_word_size - 1 - bit_pos));
4558 else
4559 word |= 1 << bit_pos;
4561 bit_pos++; ibit++;
4562 if (bit_pos >= set_word_size || ibit == nbits)
4564 if (word != 0 || ! cleared)
4566 rtx datum = GEN_INT (word);
4567 rtx to_rtx;
4568 /* The assumption here is that it is safe to use
4569 XEXP if the set is multi-word, but not if
4570 it's single-word. */
4571 if (GET_CODE (target) == MEM)
4573 to_rtx = plus_constant (XEXP (target, 0), offset);
4574 to_rtx = change_address (target, mode, to_rtx);
4576 else if (offset == 0)
4577 to_rtx = target;
4578 else
4579 abort ();
4580 emit_move_insn (to_rtx, datum);
4582 if (ibit == nbits)
4583 break;
4584 word = 0;
4585 bit_pos = 0;
4586 offset += set_word_size / BITS_PER_UNIT;
4590 else if (!cleared)
4592 /* Don't bother clearing storage if the set is all ones. */
4593 if (TREE_CHAIN (elt) != NULL_TREE
4594 || (TREE_PURPOSE (elt) == NULL_TREE
4595 ? nbits != 1
4596 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4597 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4598 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4599 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4600 != nbits))))
4601 clear_storage (target, expr_size (exp),
4602 TYPE_ALIGN (type) / BITS_PER_UNIT);
4605 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4607 /* start of range of element or NULL */
4608 tree startbit = TREE_PURPOSE (elt);
4609 /* end of range of element, or element value */
4610 tree endbit = TREE_VALUE (elt);
4611 #ifdef TARGET_MEM_FUNCTIONS
4612 HOST_WIDE_INT startb, endb;
4613 #endif
4614 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4616 bitlength_rtx = expand_expr (bitlength,
4617 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4619 /* handle non-range tuple element like [ expr ] */
4620 if (startbit == NULL_TREE)
4622 startbit = save_expr (endbit);
4623 endbit = startbit;
4625 startbit = convert (sizetype, startbit);
4626 endbit = convert (sizetype, endbit);
4627 if (! integer_zerop (domain_min))
4629 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4630 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4632 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4633 EXPAND_CONST_ADDRESS);
4634 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4635 EXPAND_CONST_ADDRESS);
4637 if (REG_P (target))
4639 targetx = assign_stack_temp (GET_MODE (target),
4640 GET_MODE_SIZE (GET_MODE (target)),
4642 emit_move_insn (targetx, target);
4644 else if (GET_CODE (target) == MEM)
4645 targetx = target;
4646 else
4647 abort ();
4649 #ifdef TARGET_MEM_FUNCTIONS
4650 /* Optimization: If startbit and endbit are
4651 constants divisible by BITS_PER_UNIT,
4652 call memset instead. */
4653 if (TREE_CODE (startbit) == INTEGER_CST
4654 && TREE_CODE (endbit) == INTEGER_CST
4655 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4656 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4658 emit_library_call (memset_libfunc, 0,
4659 VOIDmode, 3,
4660 plus_constant (XEXP (targetx, 0),
4661 startb / BITS_PER_UNIT),
4662 Pmode,
4663 constm1_rtx, TYPE_MODE (integer_type_node),
4664 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4665 TYPE_MODE (sizetype));
4667 else
4668 #endif
4670 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4671 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4672 bitlength_rtx, TYPE_MODE (sizetype),
4673 startbit_rtx, TYPE_MODE (sizetype),
4674 endbit_rtx, TYPE_MODE (sizetype));
4676 if (REG_P (target))
4677 emit_move_insn (target, targetx);
4681 else
4682 abort ();
4685 /* Store the value of EXP (an expression tree)
4686 into a subfield of TARGET which has mode MODE and occupies
4687 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4688 If MODE is VOIDmode, it means that we are storing into a bit-field.
4690 If VALUE_MODE is VOIDmode, return nothing in particular.
4691 UNSIGNEDP is not used in this case.
4693 Otherwise, return an rtx for the value stored. This rtx
4694 has mode VALUE_MODE if that is convenient to do.
4695 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4697 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4698 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4700 ALIAS_SET is the alias set for the destination. This value will
4701 (in general) be different from that for TARGET, since TARGET is a
4702 reference to the containing structure. */
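/* Example call (illustrative; all names are hypothetical): storing 5
   into `x.f', where F is an unsigned 3-bit field starting 9 bits into
   X, might look roughly like
	store_field (x_rtx, 3, 9, VOIDmode, f_value_tree, VOIDmode, 1,
		     x_align_in_bytes, x_size_in_bytes, f_alias_set);
   MODE is VOIDmode because F is a bit-field, and VALUE_MODE is VOIDmode
   because the caller does not need the stored value back.  */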
4704 static rtx
4705 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4706 unsignedp, align, total_size, alias_set)
4707 rtx target;
4708 int bitsize, bitpos;
4709 enum machine_mode mode;
4710 tree exp;
4711 enum machine_mode value_mode;
4712 int unsignedp;
4713 unsigned int align;
4714 int total_size;
4715 int alias_set;
4717 HOST_WIDE_INT width_mask = 0;
4719 if (TREE_CODE (exp) == ERROR_MARK)
4720 return const0_rtx;
4722 if (bitsize < HOST_BITS_PER_WIDE_INT)
4723 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4725 /* If we are storing into an unaligned field of an aligned union that is
4726 in a register, we may have the mode of TARGET being an integer mode but
4727 MODE == BLKmode. In that case, get an aligned object whose size and
4728 alignment are the same as TARGET and store TARGET into it (we can avoid
4729 the store if the field being stored is the entire width of TARGET). Then
4730 call ourselves recursively to store the field into a BLKmode version of
4731 that object. Finally, load from the object into TARGET. This is not
4732 very efficient in general, but should only be slightly more expensive
4733 than the otherwise-required unaligned accesses. Perhaps this can be
4734 cleaned up later. */
4736 if (mode == BLKmode
4737 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4739 rtx object = assign_stack_temp (GET_MODE (target),
4740 GET_MODE_SIZE (GET_MODE (target)), 0);
4741 rtx blk_object = copy_rtx (object);
4743 MEM_SET_IN_STRUCT_P (object, 1);
4744 MEM_SET_IN_STRUCT_P (blk_object, 1);
4745 PUT_MODE (blk_object, BLKmode);
4747 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4748 emit_move_insn (object, target);
4750 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4751 align, total_size, alias_set);
4753 /* Even though we aren't returning target, we need to
4754 give it the updated value. */
4755 emit_move_insn (target, object);
4757 return blk_object;
4760 /* If the structure is in a register or if the component
4761 is a bit field, we cannot use addressing to access it.
4762 Use bit-field techniques or SUBREG to store in it. */
4764 if (mode == VOIDmode
4765 || (mode != BLKmode && ! direct_store[(int) mode]
4766 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4767 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4768 || GET_CODE (target) == REG
4769 || GET_CODE (target) == SUBREG
4770 /* If the field isn't aligned enough to store as an ordinary memref,
4771 store it as a bit field. */
4772 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4773 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4774 || bitpos % GET_MODE_ALIGNMENT (mode)))
4775 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4776 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4777 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4778 /* If the RHS and field are a constant size and the size of the
4779 RHS isn't the same size as the bitfield, we must use bitfield
4780 operations. */
4781 || ((bitsize >= 0
4782 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4783 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4784 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4786 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4788 /* If BITSIZE is narrower than the size of the type of EXP
4789 we will be narrowing TEMP. Normally, what's wanted are the
4790 low-order bits. However, if EXP's type is a record and this is
4791 a big-endian machine, we want the upper BITSIZE bits. */
4792 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4793 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4794 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4795 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4796 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4797 - bitsize),
4798 temp, 1);
4800 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4801 MODE. */
4802 if (mode != VOIDmode && mode != BLKmode
4803 && mode != TYPE_MODE (TREE_TYPE (exp)))
4804 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4806 /* If the modes of TARGET and TEMP are both BLKmode, both
4807 must be in memory and BITPOS must be aligned on a byte
4808 boundary. If so, we simply do a block copy. */
4809 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4811 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4813 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4814 || bitpos % BITS_PER_UNIT != 0)
4815 abort ();
4817 target = change_address (target, VOIDmode,
4818 plus_constant (XEXP (target, 0),
4819 bitpos / BITS_PER_UNIT));
4821 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4822 align = MIN (exp_align, align);
4824 /* Find an alignment that is consistent with the bit position. */
4825 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4826 align >>= 1;
4828 emit_block_move (target, temp,
4829 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4830 / BITS_PER_UNIT),
4831 align);
4833 return value_mode == VOIDmode ? const0_rtx : target;
4836 /* Store the value in the bitfield. */
4837 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4838 if (value_mode != VOIDmode)
4840 /* The caller wants an rtx for the value. */
4841 /* If possible, avoid refetching from the bitfield itself. */
4842 if (width_mask != 0
4843 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4845 tree count;
4846 enum machine_mode tmode;
4848 if (unsignedp)
4849 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4850 tmode = GET_MODE (temp);
4851 if (tmode == VOIDmode)
4852 tmode = value_mode;
4853 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4854 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4855 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4857 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4858 NULL_RTX, value_mode, 0, align,
4859 total_size);
4861 return const0_rtx;
4863 else
4865 rtx addr = XEXP (target, 0);
4866 rtx to_rtx;
4868 /* If a value is wanted, it must be the lhs;
4869 so make the address stable for multiple use. */
4871 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4872 && ! CONSTANT_ADDRESS_P (addr)
4873 /* A frame-pointer reference is already stable. */
4874 && ! (GET_CODE (addr) == PLUS
4875 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4876 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4877 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4878 addr = copy_to_reg (addr);
4880 /* Now build a reference to just the desired component. */
4882 to_rtx = copy_rtx (change_address (target, mode,
4883 plus_constant (addr,
4884 (bitpos
4885 / BITS_PER_UNIT))));
4886 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4887 MEM_ALIAS_SET (to_rtx) = alias_set;
4889 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4893 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4894 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4895 ARRAY_REFs and find the ultimate containing object, which we return.
4897 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4898 bit position, and *PUNSIGNEDP to the signedness of the field.
4899 If the position of the field is variable, we store a tree
4900 giving the variable offset (in units) in *POFFSET.
4901 This offset is in addition to the bit position.
4902 If the position is not variable, we store 0 in *POFFSET.
4903 We set *PALIGNMENT to the alignment in bytes of the address that will be
4904 computed. This is the alignment of the thing we return if *POFFSET
4905 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4907 If any of the extraction expressions is volatile,
4908 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4910 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4911 is a mode that can be used to access the field. In that case, *PBITSIZE
4912 is redundant.
4914 If the field describes a variable-sized object, *PMODE is set to
4915 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4916 this case, but the address of the object can be found. */
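/* Example (illustrative; the references are hypothetical): for `s.f',
   where F is a 3-bit bit-field placed 17 bits into S, this returns the
   tree for S and sets *PBITSIZE = 3, *PBITPOS = 17, *POFFSET = 0 and
   *PMODE = VOIDmode.  For `a[i].f' with a variable index I, *POFFSET
   instead receives a tree for the byte offset of a[i] and *PBITPOS
   holds only F's bit position within its record.  */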
4918 tree
4919 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4920 punsignedp, pvolatilep, palignment)
4921 tree exp;
4922 int *pbitsize;
4923 int *pbitpos;
4924 tree *poffset;
4925 enum machine_mode *pmode;
4926 int *punsignedp;
4927 int *pvolatilep;
4928 unsigned int *palignment;
4930 tree orig_exp = exp;
4931 tree size_tree = 0;
4932 enum machine_mode mode = VOIDmode;
4933 tree offset = integer_zero_node;
4934 unsigned int alignment = BIGGEST_ALIGNMENT;
4936 if (TREE_CODE (exp) == COMPONENT_REF)
4938 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4939 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4940 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4941 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4943 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4945 size_tree = TREE_OPERAND (exp, 1);
4946 *punsignedp = TREE_UNSIGNED (exp);
4948 else
4950 mode = TYPE_MODE (TREE_TYPE (exp));
4951 if (mode == BLKmode)
4952 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4954 *pbitsize = GET_MODE_BITSIZE (mode);
4955 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4958 if (size_tree)
4960 if (TREE_CODE (size_tree) != INTEGER_CST)
4961 mode = BLKmode, *pbitsize = -1;
4962 else
4963 *pbitsize = TREE_INT_CST_LOW (size_tree);
4966 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4967 and find the ultimate containing object. */
4969 *pbitpos = 0;
4971 while (1)
4973 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4975 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4976 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4977 : TREE_OPERAND (exp, 2));
4978 tree constant = integer_zero_node, var = pos;
4980 /* If this field hasn't been filled in yet, don't go
4981 past it. This should only happen when folding expressions
4982 made during type construction. */
4983 if (pos == 0)
4984 break;
4986 /* Assume here that the offset is a multiple of a unit.
4987 If not, there should be an explicitly added constant. */
4988 if (TREE_CODE (pos) == PLUS_EXPR
4989 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4990 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4991 else if (TREE_CODE (pos) == INTEGER_CST)
4992 constant = pos, var = integer_zero_node;
4994 *pbitpos += TREE_INT_CST_LOW (constant);
4995 offset = size_binop (PLUS_EXPR, offset,
4996 size_binop (EXACT_DIV_EXPR, var,
4997 size_int (BITS_PER_UNIT)));
5000 else if (TREE_CODE (exp) == ARRAY_REF)
5002 /* This code is based on the code in case ARRAY_REF in expand_expr
5003 below. We assume here that the size of an array element is
5004 always an integral multiple of BITS_PER_UNIT. */
5006 tree index = TREE_OPERAND (exp, 1);
5007 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5008 tree low_bound
5009 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5010 tree index_type = TREE_TYPE (index);
5011 tree xindex;
5013 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5015 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5016 index);
5017 index_type = TREE_TYPE (index);
5020 /* Optimize the special case of a zero lower bound.
5022 We convert the low_bound to sizetype to avoid some problems
5023 with constant folding. (E.g. suppose the lower bound is 1,
5024 and its mode is QI. Without the conversion, (ARRAY
5025 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5026 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5028 But sizetype isn't quite right either (especially if
5029 the lowbound is negative). FIXME */
5031 if (! integer_zerop (low_bound))
5032 index = fold (build (MINUS_EXPR, index_type, index,
5033 convert (sizetype, low_bound)));
5035 if (TREE_CODE (index) == INTEGER_CST)
5037 index = convert (sbitsizetype, index);
5038 index_type = TREE_TYPE (index);
5041 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5042 convert (sbitsizetype,
5043 TYPE_SIZE (TREE_TYPE (exp)))));
5045 if (TREE_CODE (xindex) == INTEGER_CST
5046 && TREE_INT_CST_HIGH (xindex) == 0)
5047 *pbitpos += TREE_INT_CST_LOW (xindex);
5048 else
5050 /* Either the bit offset calculated above is not constant, or
5051 it overflowed. In either case, redo the multiplication
5052 against the size in units. This is especially important
5053 in the non-constant case to avoid a division at runtime. */
5054 xindex = fold (build (MULT_EXPR, ssizetype, index,
5055 convert (ssizetype,
5056 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5058 if (contains_placeholder_p (xindex))
5059 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5061 offset = size_binop (PLUS_EXPR, offset, xindex);
5064 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5065 && ! ((TREE_CODE (exp) == NOP_EXPR
5066 || TREE_CODE (exp) == CONVERT_EXPR)
5067 && (TYPE_MODE (TREE_TYPE (exp))
5068 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5069 break;
5071 /* If any reference in the chain is volatile, the effect is volatile. */
5072 if (TREE_THIS_VOLATILE (exp))
5073 *pvolatilep = 1;
5075 /* If the offset is non-constant already, then we can't assume any
5076 alignment more than the alignment here. */
5077 if (! integer_zerop (offset))
5078 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5080 exp = TREE_OPERAND (exp, 0);
5083 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5084 alignment = MIN (alignment, DECL_ALIGN (exp));
5085 else if (TREE_TYPE (exp) != 0)
5086 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5088 if (integer_zerop (offset))
5089 offset = 0;
5091 if (offset != 0 && contains_placeholder_p (offset))
5092 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5094 *pmode = mode;
5095 *poffset = offset;
5096 *palignment = alignment / BITS_PER_UNIT;
5097 return exp;
5100 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5101 static enum memory_use_mode
5102 get_memory_usage_from_modifier (modifier)
5103 enum expand_modifier modifier;
5105 switch (modifier)
5107 case EXPAND_NORMAL:
5108 case EXPAND_SUM:
5109 return MEMORY_USE_RO;
5110 break;
5111 case EXPAND_MEMORY_USE_WO:
5112 return MEMORY_USE_WO;
5113 break;
5114 case EXPAND_MEMORY_USE_RW:
5115 return MEMORY_USE_RW;
5116 break;
5117 case EXPAND_MEMORY_USE_DONT:
5118 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5119 MEMORY_USE_DONT, because they are modifiers to a call of
5120 expand_expr in the ADDR_EXPR case of expand_expr. */
5121 case EXPAND_CONST_ADDRESS:
5122 case EXPAND_INITIALIZER:
5123 return MEMORY_USE_DONT;
5124 case EXPAND_MEMORY_USE_BAD:
5125 default:
5126 abort ();
5130 /* Given an rtx VALUE that may contain additions and multiplications,
5131 return an equivalent value that just refers to a register or memory.
5132 This is done by generating instructions to perform the arithmetic
5133 and returning a pseudo-register containing the value.
5135 The returned value may be a REG, SUBREG, MEM or constant. */
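/* Example (illustrative): given VALUE = (plus:SI (reg:SI 100)
   (mult:SI (reg:SI 101) (const_int 4))), force_operand emits a multiply
   and an add and returns the pseudo register holding the sum; a VALUE
   that is already a REG, MEM or constant is returned unchanged.  */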
5138 force_operand (value, target)
5139 rtx value, target;
5141 register optab binoptab = 0;
5142 /* Use a temporary to force order of execution of calls to
5143 `force_operand'. */
5144 rtx tmp;
5145 register rtx op2;
5146 /* Use subtarget as the target for operand 0 of a binary operation. */
5147 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5149 /* Check for a PIC address load. */
5150 if (flag_pic
5151 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5152 && XEXP (value, 0) == pic_offset_table_rtx
5153 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5154 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5155 || GET_CODE (XEXP (value, 1)) == CONST))
5157 if (!subtarget)
5158 subtarget = gen_reg_rtx (GET_MODE (value));
5159 emit_move_insn (subtarget, value);
5160 return subtarget;
5163 if (GET_CODE (value) == PLUS)
5164 binoptab = add_optab;
5165 else if (GET_CODE (value) == MINUS)
5166 binoptab = sub_optab;
5167 else if (GET_CODE (value) == MULT)
5169 op2 = XEXP (value, 1);
5170 if (!CONSTANT_P (op2)
5171 && !(GET_CODE (op2) == REG && op2 != subtarget))
5172 subtarget = 0;
5173 tmp = force_operand (XEXP (value, 0), subtarget);
5174 return expand_mult (GET_MODE (value), tmp,
5175 force_operand (op2, NULL_RTX),
5176 target, 0);
5179 if (binoptab)
5181 op2 = XEXP (value, 1);
5182 if (!CONSTANT_P (op2)
5183 && !(GET_CODE (op2) == REG && op2 != subtarget))
5184 subtarget = 0;
5185 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5187 binoptab = add_optab;
5188 op2 = negate_rtx (GET_MODE (value), op2);
5191 /* Check for an addition with OP2 a constant integer and our first
5192 operand a PLUS of a virtual register and something else. In that
5193 case, we want to emit the sum of the virtual register and the
5194 constant first and then add the other value. This allows virtual
5195 register instantiation to simply modify the constant rather than
5196 creating another one around this addition. */
5197 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5198 && GET_CODE (XEXP (value, 0)) == PLUS
5199 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5200 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5201 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5203 rtx temp = expand_binop (GET_MODE (value), binoptab,
5204 XEXP (XEXP (value, 0), 0), op2,
5205 subtarget, 0, OPTAB_LIB_WIDEN);
5206 return expand_binop (GET_MODE (value), binoptab, temp,
5207 force_operand (XEXP (XEXP (value, 0), 1), 0),
5208 target, 0, OPTAB_LIB_WIDEN);
5211 tmp = force_operand (XEXP (value, 0), subtarget);
5212 return expand_binop (GET_MODE (value), binoptab, tmp,
5213 force_operand (op2, NULL_RTX),
5214 target, 0, OPTAB_LIB_WIDEN);
5215 /* We give UNSIGNEDP = 0 to expand_binop
5216 because the only operations we are expanding here are signed ones. */
5218 return value;
5221 /* Subroutine of expand_expr:
5222 save the non-copied parts (LIST) of an expr (LHS), and return a list
5223 which can restore these values to their previous values,
5224 should something modify their storage. */
5226 static tree
5227 save_noncopied_parts (lhs, list)
5228 tree lhs;
5229 tree list;
5231 tree tail;
5232 tree parts = 0;
5234 for (tail = list; tail; tail = TREE_CHAIN (tail))
5235 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5236 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5237 else
5239 tree part = TREE_VALUE (tail);
5240 tree part_type = TREE_TYPE (part);
5241 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5242 rtx target = assign_temp (part_type, 0, 1, 1);
5243 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5244 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5245 parts = tree_cons (to_be_saved,
5246 build (RTL_EXPR, part_type, NULL_TREE,
5247 (tree) target),
5248 parts);
5249 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5251 return parts;
5254 /* Subroutine of expand_expr:
5255 record the non-copied parts (LIST) of an expr (LHS), and return a list
5256 which specifies the initial values of these parts. */
5258 static tree
5259 init_noncopied_parts (lhs, list)
5260 tree lhs;
5261 tree list;
5263 tree tail;
5264 tree parts = 0;
5266 for (tail = list; tail; tail = TREE_CHAIN (tail))
5267 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5268 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5269 else if (TREE_PURPOSE (tail))
5271 tree part = TREE_VALUE (tail);
5272 tree part_type = TREE_TYPE (part);
5273 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5274 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5276 return parts;
5279 /* Subroutine of expand_expr: return nonzero iff there is no way that
5280 EXP can reference X, which is being modified. TOP_P is nonzero if this
5281 call is going to be used to determine whether we need a temporary
5282 for EXP, as opposed to a recursive call to this function.
5284 It is always safe for this routine to return zero since it merely
5285 searches for optimization opportunities. */
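/* Example (illustrative): a caller deciding whether EXP can be expanded
   directly into TARGET asks safe_from_p (target, exp, 1).  A CALL_EXPR
   with no CALL_EXPR_RTL is assumed to clobber all of memory and all
   hard registers, so when TARGET is a MEM the answer is 0 and the
   caller must compute the value into a temporary first.  */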
5287 static int
5288 safe_from_p (x, exp, top_p)
5289 rtx x;
5290 tree exp;
5291 int top_p;
5293 rtx exp_rtl = 0;
5294 int i, nops;
5295 static int save_expr_count;
5296 static int save_expr_size = 0;
5297 static tree *save_expr_rewritten;
5298 static tree save_expr_trees[256];
5300 if (x == 0
5301 /* If EXP has varying size, we MUST use a target since we currently
5302 have no way of allocating temporaries of variable size
5303 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5304 So we assume here that something at a higher level has prevented a
5305 clash. This is somewhat bogus, but the best we can do. Only
5306 do this when X is BLKmode and when we are at the top level. */
5307 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5308 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5309 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5310 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5311 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5312 != INTEGER_CST)
5313 && GET_MODE (x) == BLKmode))
5314 return 1;
5316 if (top_p && save_expr_size == 0)
5318 int rtn;
5320 save_expr_count = 0;
5321 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5322 save_expr_rewritten = &save_expr_trees[0];
5324 rtn = safe_from_p (x, exp, 1);
5326 for (i = 0; i < save_expr_count; ++i)
5328 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5329 abort ();
5330 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5333 save_expr_size = 0;
5335 return rtn;
5338 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5339 find the underlying pseudo. */
5340 if (GET_CODE (x) == SUBREG)
5342 x = SUBREG_REG (x);
5343 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5344 return 0;
5347 /* If X is a location in the outgoing argument area, it is always safe. */
5348 if (GET_CODE (x) == MEM
5349 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5350 || (GET_CODE (XEXP (x, 0)) == PLUS
5351 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5352 return 1;
5354 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5356 case 'd':
5357 exp_rtl = DECL_RTL (exp);
5358 break;
5360 case 'c':
5361 return 1;
5363 case 'x':
5364 if (TREE_CODE (exp) == TREE_LIST)
5365 return ((TREE_VALUE (exp) == 0
5366 || safe_from_p (x, TREE_VALUE (exp), 0))
5367 && (TREE_CHAIN (exp) == 0
5368 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5369 else if (TREE_CODE (exp) == ERROR_MARK)
5370 return 1; /* An already-visited SAVE_EXPR? */
5371 else
5372 return 0;
5374 case '1':
5375 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5377 case '2':
5378 case '<':
5379 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5380 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5382 case 'e':
5383 case 'r':
5384 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5385 the expression. If it is set, we conflict iff we are that rtx or
5386 both are in memory. Otherwise, we check all operands of the
5387 expression recursively. */
5389 switch (TREE_CODE (exp))
5391 case ADDR_EXPR:
5392 return (staticp (TREE_OPERAND (exp, 0))
5393 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5394 || TREE_STATIC (exp));
5396 case INDIRECT_REF:
5397 if (GET_CODE (x) == MEM)
5398 return 0;
5399 break;
5401 case CALL_EXPR:
5402 exp_rtl = CALL_EXPR_RTL (exp);
5403 if (exp_rtl == 0)
5405 /* Assume that the call will clobber all hard registers and
5406 all of memory. */
5407 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5408 || GET_CODE (x) == MEM)
5409 return 0;
5412 break;
5414 case RTL_EXPR:
5415 /* If a sequence exists, we would have to scan every instruction
5416 in the sequence to see if it was safe. This is probably not
5417 worthwhile. */
5418 if (RTL_EXPR_SEQUENCE (exp))
5419 return 0;
5421 exp_rtl = RTL_EXPR_RTL (exp);
5422 break;
5424 case WITH_CLEANUP_EXPR:
5425 exp_rtl = RTL_EXPR_RTL (exp);
5426 break;
5428 case CLEANUP_POINT_EXPR:
5429 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5431 case SAVE_EXPR:
5432 exp_rtl = SAVE_EXPR_RTL (exp);
5433 if (exp_rtl)
5434 break;
5436 /* This SAVE_EXPR might appear many times in the top-level
5437 safe_from_p() expression, and if it has a complex
5438 subexpression, examining it multiple times could result
5439 in a combinatorial explosion. E.g. on an Alpha
5440 running at least 200MHz, a Fortran test case compiled with
5441 optimization took about 28 minutes to compile -- even though
5442 it was only a few lines long, and the complicated line causing
5443 so much time to be spent in the earlier version of safe_from_p()
5444 had only 293 or so unique nodes.
5446 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5447 where it is so we can turn it back in the top-level safe_from_p()
5448 when we're done. */
5450 /* For now, don't bother re-sizing the array. */
5451 if (save_expr_count >= save_expr_size)
5452 return 0;
5453 save_expr_rewritten[save_expr_count++] = exp;
5455 nops = tree_code_length[(int) SAVE_EXPR];
5456 for (i = 0; i < nops; i++)
5458 tree operand = TREE_OPERAND (exp, i);
5459 if (operand == NULL_TREE)
5460 continue;
5461 TREE_SET_CODE (exp, ERROR_MARK);
5462 if (!safe_from_p (x, operand, 0))
5463 return 0;
5464 TREE_SET_CODE (exp, SAVE_EXPR);
5466 TREE_SET_CODE (exp, ERROR_MARK);
5467 return 1;
5469 case BIND_EXPR:
5470 /* The only operand we look at is operand 1. The rest aren't
5471 part of the expression. */
5472 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5474 case METHOD_CALL_EXPR:
5475 /* This takes an rtx argument, but shouldn't appear here. */
5476 abort ();
5478 default:
5479 break;
5482 /* If we have an rtx, we do not need to scan our operands. */
5483 if (exp_rtl)
5484 break;
5486 nops = tree_code_length[(int) TREE_CODE (exp)];
5487 for (i = 0; i < nops; i++)
5488 if (TREE_OPERAND (exp, i) != 0
5489 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5490 return 0;
5493 /* If we have an rtl, find any enclosed object. Then see if we conflict
5494 with it. */
5495 if (exp_rtl)
5497 if (GET_CODE (exp_rtl) == SUBREG)
5499 exp_rtl = SUBREG_REG (exp_rtl);
5500 if (GET_CODE (exp_rtl) == REG
5501 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5502 return 0;
5505 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5506 are memory and EXP is not readonly. */
5507 return ! (rtx_equal_p (x, exp_rtl)
5508 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5509 && ! TREE_READONLY (exp)));
5512 /* If we reach here, it is safe. */
5513 return 1;
5516 /* Subroutine of expand_expr: return nonzero iff EXP is an
5517 expression whose type is statically determinable. */
5519 static int
5520 fixed_type_p (exp)
5521 tree exp;
5523 if (TREE_CODE (exp) == PARM_DECL
5524 || TREE_CODE (exp) == VAR_DECL
5525 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5526 || TREE_CODE (exp) == COMPONENT_REF
5527 || TREE_CODE (exp) == ARRAY_REF)
5528 return 1;
5529 return 0;
5532 /* Subroutine of expand_expr: return rtx if EXP is a
5533 variable or parameter; else return 0. */
5535 static rtx
5536 var_rtx (exp)
5537 tree exp;
5539 STRIP_NOPS (exp);
5540 switch (TREE_CODE (exp))
5542 case PARM_DECL:
5543 case VAR_DECL:
5544 return DECL_RTL (exp);
5545 default:
5546 return 0;
5550 #ifdef MAX_INTEGER_COMPUTATION_MODE
5551 void
5552 check_max_integer_computation_mode (exp)
5553 tree exp;
5555 enum tree_code code;
5556 enum machine_mode mode;
5558 /* Strip any NOPs that don't change the mode. */
5559 STRIP_NOPS (exp);
5560 code = TREE_CODE (exp);
5562 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5563 if (code == NOP_EXPR
5564 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5565 return;
5567 /* First check the type of the overall operation. We need only look at
5568 unary, binary and relational operations. */
5569 if (TREE_CODE_CLASS (code) == '1'
5570 || TREE_CODE_CLASS (code) == '2'
5571 || TREE_CODE_CLASS (code) == '<')
5573 mode = TYPE_MODE (TREE_TYPE (exp));
5574 if (GET_MODE_CLASS (mode) == MODE_INT
5575 && mode > MAX_INTEGER_COMPUTATION_MODE)
5576 fatal ("unsupported wide integer operation");
5579 /* Check operand of a unary op. */
5580 if (TREE_CODE_CLASS (code) == '1')
5582 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5583 if (GET_MODE_CLASS (mode) == MODE_INT
5584 && mode > MAX_INTEGER_COMPUTATION_MODE)
5585 fatal ("unsupported wide integer operation");
5588 /* Check operands of a binary/comparison op. */
5589 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5591 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5592 if (GET_MODE_CLASS (mode) == MODE_INT
5593 && mode > MAX_INTEGER_COMPUTATION_MODE)
5594 fatal ("unsupported wide integer operation");
5596 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5597 if (GET_MODE_CLASS (mode) == MODE_INT
5598 && mode > MAX_INTEGER_COMPUTATION_MODE)
5599 fatal ("unsupported wide integer operation");
5602 #endif
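/* As a sketch of the intent: a target that cannot do integer
   arithmetic wider than, say, DImode might define

	#define MAX_INTEGER_COMPUTATION_MODE DImode

   in its target description header, and the checks above would then
   reject any operation whose operands or result need a wider integer
   mode (e.g. TImode) instead of silently emitting insns the machine
   cannot support.  */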
5605 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5606 has any readonly fields. If any of the fields have types that
5607 contain readonly fields, return true as well. */
5609 static int
5610 readonly_fields_p (type)
5611 tree type;
5613 tree field;
5615 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5616 if (TREE_CODE (field) == FIELD_DECL
5617 && (TREE_READONLY (field)
5618 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5619 && readonly_fields_p (TREE_TYPE (field)))))
5620 return 1;
5622 return 0;
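/* For example, with declarations along the lines of

	struct inner { const int key; int val; };
	struct outer { struct inner i; int x; };

   readonly_fields_p returns 1 for both record types: `inner' directly
   contains a readonly FIELD_DECL, and `outer' contains a field whose
   RECORD_TYPE in turn has one.  */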
5625 /* expand_expr: generate code for computing expression EXP.
5626 An rtx for the computed value is returned. The value is never null.
5627 In the case of a void EXP, const0_rtx is returned.
5629 The value may be stored in TARGET if TARGET is nonzero.
5630 TARGET is just a suggestion; callers must assume that
5631 the rtx returned may not be the same as TARGET.
5633 If TARGET is CONST0_RTX, it means that the value will be ignored.
5635 If TMODE is not VOIDmode, it suggests generating the
5636 result in mode TMODE. But this is done only when convenient.
5637 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5638 TMODE is just a suggestion; callers must assume that
5639 the rtx returned may not have mode TMODE.
5641 Note that TARGET may have neither TMODE nor MODE. In that case, it
5642 probably will not be used.
5644 If MODIFIER is EXPAND_SUM then when EXP is an addition
5645 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5646 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5647 products as above, or REG or MEM, or constant.
5648 Ordinarily in such cases we would output mul or add instructions
5649 and then return a pseudo reg containing the sum.
5651 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5652 it also marks a label as absolutely required (it can't be dead).
5653 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5654 This is used for outputting expressions used in initializers.
5656 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5657 with a constant address even if that address is not normally legitimate.
5658 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
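/* A typical caller simply does something like

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and lets expand_expr choose the target and mode, whereas address
   arithmetic uses EXPAND_SUM so that, for instance, &arr[4] with
   4-byte elements may come back as

	(plus (symbol_ref "arr") (const_int 16))

   ready to be used as a memory address rather than being forced into
   a fresh pseudo.  */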
5660 rtx
5661 expand_expr (exp, target, tmode, modifier)
5662 register tree exp;
5663 rtx target;
5664 enum machine_mode tmode;
5665 enum expand_modifier modifier;
5667 register rtx op0, op1, temp;
5668 tree type = TREE_TYPE (exp);
5669 int unsignedp = TREE_UNSIGNED (type);
5670 register enum machine_mode mode;
5671 register enum tree_code code = TREE_CODE (exp);
5672 optab this_optab;
5673 rtx subtarget, original_target;
5674 int ignore;
5675 tree context;
5676 /* Used by check-memory-usage to make modifier read only. */
5677 enum expand_modifier ro_modifier;
5679 /* Handle ERROR_MARK before anybody tries to access its type. */
5680 if (TREE_CODE (exp) == ERROR_MARK)
5682 op0 = CONST0_RTX (tmode);
5683 if (op0 != 0)
5684 return op0;
5685 return const0_rtx;
5688 mode = TYPE_MODE (type);
5689 /* Use subtarget as the target for operand 0 of a binary operation. */
5690 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5691 original_target = target;
5692 ignore = (target == const0_rtx
5693 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5694 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5695 || code == COND_EXPR)
5696 && TREE_CODE (type) == VOID_TYPE));
5698 /* Make a read-only version of the modifier. */
5699 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5700 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5701 ro_modifier = modifier;
5702 else
5703 ro_modifier = EXPAND_NORMAL;
5705 /* Don't use hard regs as subtargets, because the combiner
5706 can only handle pseudo regs. */
5707 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5708 subtarget = 0;
5709 /* Avoid subtargets inside loops,
5710 since they hide some invariant expressions. */
5711 if (preserve_subexpressions_p ())
5712 subtarget = 0;
5714 /* If we are going to ignore this result, we need only do something
5715 if there is a side-effect somewhere in the expression. If there
5716 is, short-circuit the most common cases here. Note that we must
5717 not call expand_expr with anything but const0_rtx in case this
5718 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5720 if (ignore)
5722 if (! TREE_SIDE_EFFECTS (exp))
5723 return const0_rtx;
5725 /* Ensure we reference a volatile object even if value is ignored, but
5726 don't do this if all we are doing is taking its address. */
5727 if (TREE_THIS_VOLATILE (exp)
5728 && TREE_CODE (exp) != FUNCTION_DECL
5729 && mode != VOIDmode && mode != BLKmode
5730 && modifier != EXPAND_CONST_ADDRESS)
5732 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5733 if (GET_CODE (temp) == MEM)
5734 temp = copy_to_reg (temp);
5735 return const0_rtx;
5738 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5739 || code == INDIRECT_REF || code == BUFFER_REF)
5740 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5741 VOIDmode, ro_modifier);
5742 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5743 || code == ARRAY_REF)
5745 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5746 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5747 return const0_rtx;
5749 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5750 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5751 /* If the second operand has no side effects, just evaluate
5752 the first. */
5753 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5754 VOIDmode, ro_modifier);
5755 else if (code == BIT_FIELD_REF)
5757 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5758 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5759 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5760 return const0_rtx;
5763 target = 0;
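/* For instance, when the value of an expression such as

	(void) (a + f (b));

   is not wanted, the '2'-class case above simply expands both
   operands with const0_rtx as the target, so only the call to f
   actually produces code and the addition itself is never emitted.  */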
5766 #ifdef MAX_INTEGER_COMPUTATION_MODE
5767 /* Only check stuff here if the mode we want is different from the mode
5768 of the expression; if it's the same, check_max_integer_computation_mode
5769 will handle it. Do we really need to check this stuff at all? */
5771 if (target
5772 && GET_MODE (target) != mode
5773 && TREE_CODE (exp) != INTEGER_CST
5774 && TREE_CODE (exp) != PARM_DECL
5775 && TREE_CODE (exp) != ARRAY_REF
5776 && TREE_CODE (exp) != COMPONENT_REF
5777 && TREE_CODE (exp) != BIT_FIELD_REF
5778 && TREE_CODE (exp) != INDIRECT_REF
5779 && TREE_CODE (exp) != CALL_EXPR
5780 && TREE_CODE (exp) != VAR_DECL
5781 && TREE_CODE (exp) != RTL_EXPR)
5783 enum machine_mode mode = GET_MODE (target);
5785 if (GET_MODE_CLASS (mode) == MODE_INT
5786 && mode > MAX_INTEGER_COMPUTATION_MODE)
5787 fatal ("unsupported wide integer operation");
5790 if (tmode != mode
5791 && TREE_CODE (exp) != INTEGER_CST
5792 && TREE_CODE (exp) != PARM_DECL
5793 && TREE_CODE (exp) != ARRAY_REF
5794 && TREE_CODE (exp) != COMPONENT_REF
5795 && TREE_CODE (exp) != BIT_FIELD_REF
5796 && TREE_CODE (exp) != INDIRECT_REF
5797 && TREE_CODE (exp) != VAR_DECL
5798 && TREE_CODE (exp) != CALL_EXPR
5799 && TREE_CODE (exp) != RTL_EXPR
5800 && GET_MODE_CLASS (tmode) == MODE_INT
5801 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5802 fatal ("unsupported wide integer operation");
5804 check_max_integer_computation_mode (exp);
5805 #endif
5807 /* If we will do cse, generate all results into pseudo registers
5808 since 1) that allows cse to find more things
5809 and 2) otherwise cse could produce an insn the machine
5810 cannot support. */
5812 if (! cse_not_expected && mode != BLKmode && target
5813 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5814 target = subtarget;
5816 switch (code)
5818 case LABEL_DECL:
5820 tree function = decl_function_context (exp);
5821 /* Handle using a label in a containing function. */
5822 if (function != current_function_decl
5823 && function != inline_function_decl && function != 0)
5825 struct function *p = find_function_data (function);
5826 /* Allocate in the memory associated with the function
5827 that the label is in. */
5828 push_obstacks (p->function_obstack,
5829 p->function_maybepermanent_obstack);
5831 p->expr->x_forced_labels
5832 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5833 p->expr->x_forced_labels);
5834 pop_obstacks ();
5836 else
5838 if (modifier == EXPAND_INITIALIZER)
5839 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5840 label_rtx (exp),
5841 forced_labels);
5844 temp = gen_rtx_MEM (FUNCTION_MODE,
5845 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5846 if (function != current_function_decl
5847 && function != inline_function_decl && function != 0)
5848 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5849 return temp;
5852 case PARM_DECL:
5853 if (DECL_RTL (exp) == 0)
5855 error_with_decl (exp, "prior parameter's size depends on `%s'");
5856 return CONST0_RTX (mode);
5859 /* ... fall through ... */
5861 case VAR_DECL:
5862 /* If a static var's type was incomplete when the decl was written,
5863 but the type is complete now, lay out the decl now. */
5864 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5865 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5867 push_obstacks_nochange ();
5868 end_temporary_allocation ();
5869 layout_decl (exp, 0);
5870 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5871 pop_obstacks ();
5874 /* Although static-storage variables start off initialized, according to
5875 ANSI C, a memcpy could overwrite them with uninitialized values. So
5876 we check them too. This also lets us check for read-only variables
5877 accessed via a non-const declaration, in case it won't be detected
5878 any other way (e.g., in an embedded system or OS kernel without
5879 memory protection).
5881 Aggregates are not checked here; they're handled elsewhere. */
5882 if (cfun && current_function_check_memory_usage
5883 && code == VAR_DECL
5884 && GET_CODE (DECL_RTL (exp)) == MEM
5885 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5887 enum memory_use_mode memory_usage;
5888 memory_usage = get_memory_usage_from_modifier (modifier);
5890 if (memory_usage != MEMORY_USE_DONT)
5891 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5892 XEXP (DECL_RTL (exp), 0), Pmode,
5893 GEN_INT (int_size_in_bytes (type)),
5894 TYPE_MODE (sizetype),
5895 GEN_INT (memory_usage),
5896 TYPE_MODE (integer_type_node));
5899 /* ... fall through ... */
5901 case FUNCTION_DECL:
5902 case RESULT_DECL:
5903 if (DECL_RTL (exp) == 0)
5904 abort ();
5906 /* Ensure variable marked as used even if it doesn't go through
5907 a parser. If it hasn't been used yet, write out an external
5908 definition. */
5909 if (! TREE_USED (exp))
5911 assemble_external (exp);
5912 TREE_USED (exp) = 1;
5915 /* Show we haven't gotten RTL for this yet. */
5916 temp = 0;
5918 /* Handle variables inherited from containing functions. */
5919 context = decl_function_context (exp);
5921 /* We treat inline_function_decl as an alias for the current function
5922 because that is the inline function whose vars, types, etc.
5923 are being merged into the current function.
5924 See expand_inline_function. */
5926 if (context != 0 && context != current_function_decl
5927 && context != inline_function_decl
5928 /* If var is static, we don't need a static chain to access it. */
5929 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5930 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5932 rtx addr;
5934 /* Mark as non-local and addressable. */
5935 DECL_NONLOCAL (exp) = 1;
5936 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5937 abort ();
5938 mark_addressable (exp);
5939 if (GET_CODE (DECL_RTL (exp)) != MEM)
5940 abort ();
5941 addr = XEXP (DECL_RTL (exp), 0);
5942 if (GET_CODE (addr) == MEM)
5943 addr = gen_rtx_MEM (Pmode,
5944 fix_lexical_addr (XEXP (addr, 0), exp));
5945 else
5946 addr = fix_lexical_addr (addr, exp);
5947 temp = change_address (DECL_RTL (exp), mode, addr);
5950 /* This is the case of an array whose size is to be determined
5951 from its initializer, while the initializer is still being parsed.
5952 See expand_decl. */
5954 else if (GET_CODE (DECL_RTL (exp)) == MEM
5955 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5956 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5957 XEXP (DECL_RTL (exp), 0));
5959 /* If DECL_RTL is memory, we are in the normal case; if either
5960 the address is not valid, or it is not a register and -fforce-addr
5961 is specified, get the address into a register. */
5963 else if (GET_CODE (DECL_RTL (exp)) == MEM
5964 && modifier != EXPAND_CONST_ADDRESS
5965 && modifier != EXPAND_SUM
5966 && modifier != EXPAND_INITIALIZER
5967 && (! memory_address_p (DECL_MODE (exp),
5968 XEXP (DECL_RTL (exp), 0))
5969 || (flag_force_addr
5970 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5971 temp = change_address (DECL_RTL (exp), VOIDmode,
5972 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5974 /* If we got something, return it. But first, set the alignment
5975 if the address is a register. */
5976 if (temp != 0)
5978 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5979 mark_reg_pointer (XEXP (temp, 0),
5980 DECL_ALIGN (exp) / BITS_PER_UNIT);
5982 return temp;
5985 /* If the mode of DECL_RTL does not match that of the decl, it
5986 must be a promoted value. We return a SUBREG of the wanted mode,
5987 but mark it so that we know that it was already extended. */
5989 if (GET_CODE (DECL_RTL (exp)) == REG
5990 && GET_MODE (DECL_RTL (exp)) != mode)
5992 /* Get the signedness used for this variable. Ensure we get the
5993 same mode we got when the variable was declared. */
5994 if (GET_MODE (DECL_RTL (exp))
5995 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5996 abort ();
5998 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5999 SUBREG_PROMOTED_VAR_P (temp) = 1;
6000 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6001 return temp;
6004 return DECL_RTL (exp);
6006 case INTEGER_CST:
6007 return immed_double_const (TREE_INT_CST_LOW (exp),
6008 TREE_INT_CST_HIGH (exp),
6009 mode);
6011 case CONST_DECL:
6012 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6013 EXPAND_MEMORY_USE_BAD);
6015 case REAL_CST:
6016 /* If optimized, generate immediate CONST_DOUBLE
6017 which will be turned into memory by reload if necessary.
6019 We used to force a register so that loop.c could see it. But
6020 this does not allow gen_* patterns to perform optimizations with
6021 the constants. It also produces two insns in cases like "x = 1.0;".
6022 On most machines, floating-point constants are not permitted in
6023 many insns, so we'd end up copying it to a register in any case.
6025 Now, we do the copying in expand_binop, if appropriate. */
6026 return immed_real_const (exp);
6028 case COMPLEX_CST:
6029 case STRING_CST:
6030 if (! TREE_CST_RTL (exp))
6031 output_constant_def (exp);
6033 /* TREE_CST_RTL probably contains a constant address.
6034 On RISC machines where a constant address isn't valid,
6035 make some insns to get that address into a register. */
6036 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6037 && modifier != EXPAND_CONST_ADDRESS
6038 && modifier != EXPAND_INITIALIZER
6039 && modifier != EXPAND_SUM
6040 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6041 || (flag_force_addr
6042 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6043 return change_address (TREE_CST_RTL (exp), VOIDmode,
6044 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6045 return TREE_CST_RTL (exp);
6047 case EXPR_WITH_FILE_LOCATION:
6049 rtx to_return;
6050 char *saved_input_filename = input_filename;
6051 int saved_lineno = lineno;
6052 input_filename = EXPR_WFL_FILENAME (exp);
6053 lineno = EXPR_WFL_LINENO (exp);
6054 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6055 emit_line_note (input_filename, lineno);
6056 /* Possibly avoid switching back and forth here. */
6057 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6058 input_filename = saved_input_filename;
6059 lineno = saved_lineno;
6060 return to_return;
6063 case SAVE_EXPR:
6064 context = decl_function_context (exp);
6066 /* If this SAVE_EXPR was at global context, assume we are an
6067 initialization function and move it into our context. */
6068 if (context == 0)
6069 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6071 /* We treat inline_function_decl as an alias for the current function
6072 because that is the inline function whose vars, types, etc.
6073 are being merged into the current function.
6074 See expand_inline_function. */
6075 if (context == current_function_decl || context == inline_function_decl)
6076 context = 0;
6078 /* If this is non-local, handle it. */
6079 if (context)
6081 /* The following call just exists to abort if the context is
6082 not of a containing function. */
6083 find_function_data (context);
6085 temp = SAVE_EXPR_RTL (exp);
6086 if (temp && GET_CODE (temp) == REG)
6088 put_var_into_stack (exp);
6089 temp = SAVE_EXPR_RTL (exp);
6091 if (temp == 0 || GET_CODE (temp) != MEM)
6092 abort ();
6093 return change_address (temp, mode,
6094 fix_lexical_addr (XEXP (temp, 0), exp));
6096 if (SAVE_EXPR_RTL (exp) == 0)
6098 if (mode == VOIDmode)
6099 temp = const0_rtx;
6100 else
6101 temp = assign_temp (type, 3, 0, 0);
6103 SAVE_EXPR_RTL (exp) = temp;
6104 if (!optimize && GET_CODE (temp) == REG)
6105 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6106 save_expr_regs);
6108 /* If the mode of TEMP does not match that of the expression, it
6109 must be a promoted value. We pass store_expr a SUBREG of the
6110 wanted mode but mark it so that we know that it was already
6111 extended. Note that `unsignedp' was modified above in
6112 this case. */
6114 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6116 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6117 SUBREG_PROMOTED_VAR_P (temp) = 1;
6118 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6121 if (temp == const0_rtx)
6122 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6123 EXPAND_MEMORY_USE_BAD);
6124 else
6125 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6127 TREE_USED (exp) = 1;
6130 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6131 must be a promoted value. We return a SUBREG of the wanted mode,
6132 but mark it so that we know that it was already extended. */
6134 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6135 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6137 /* Compute the signedness and make the proper SUBREG. */
6138 promote_mode (type, mode, &unsignedp, 0);
6139 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6140 SUBREG_PROMOTED_VAR_P (temp) = 1;
6141 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6142 return temp;
6145 return SAVE_EXPR_RTL (exp);
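/* Sketch of the effect: for a tree such as

	t = SAVE_EXPR (f (x));
	... t + t ...

   the first expansion of T calls f, stores the result in a temporary
   (or pseudo), and records it in SAVE_EXPR_RTL; every later expansion
   of the same SAVE_EXPR just reuses that rtl, so f is evaluated
   exactly once.  */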
6147 case UNSAVE_EXPR:
6149 rtx temp;
6150 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6151 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6152 return temp;
6155 case PLACEHOLDER_EXPR:
6157 tree placeholder_expr;
6159 /* If there is an object on the head of the placeholder list,
6160 see if some object in it is of type TYPE or a pointer to it. For
6161 further information, see tree.def. */
6162 for (placeholder_expr = placeholder_list;
6163 placeholder_expr != 0;
6164 placeholder_expr = TREE_CHAIN (placeholder_expr))
6166 tree need_type = TYPE_MAIN_VARIANT (type);
6167 tree object = 0;
6168 tree old_list = placeholder_list;
6169 tree elt;
6171 /* Find the outermost reference that is of the type we want.
6172 If none, see if any object has a type that is a pointer to
6173 the type we want. */
6174 for (elt = TREE_PURPOSE (placeholder_expr);
6175 elt != 0 && object == 0;
6176 elt
6177 = ((TREE_CODE (elt) == COMPOUND_EXPR
6178 || TREE_CODE (elt) == COND_EXPR)
6179 ? TREE_OPERAND (elt, 1)
6180 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6181 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6182 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6183 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6184 ? TREE_OPERAND (elt, 0) : 0))
6185 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6186 object = elt;
6188 for (elt = TREE_PURPOSE (placeholder_expr);
6189 elt != 0 && object == 0;
6190 elt
6191 = ((TREE_CODE (elt) == COMPOUND_EXPR
6192 || TREE_CODE (elt) == COND_EXPR)
6193 ? TREE_OPERAND (elt, 1)
6194 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6195 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6196 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6197 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6198 ? TREE_OPERAND (elt, 0) : 0))
6199 if (POINTER_TYPE_P (TREE_TYPE (elt))
6200 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6201 == need_type))
6202 object = build1 (INDIRECT_REF, need_type, elt);
6204 if (object != 0)
6206 /* Expand this object skipping the list entries before
6207 it was found in case it is also a PLACEHOLDER_EXPR.
6208 In that case, we want to translate it using subsequent
6209 entries. */
6210 placeholder_list = TREE_CHAIN (placeholder_expr);
6211 temp = expand_expr (object, original_target, tmode,
6212 ro_modifier);
6213 placeholder_list = old_list;
6214 return temp;
6219 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6220 abort ();
6222 case WITH_RECORD_EXPR:
6223 /* Put the object on the placeholder list, expand our first operand,
6224 and pop the list. */
6225 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6226 placeholder_list);
6227 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6228 tmode, ro_modifier);
6229 placeholder_list = TREE_CHAIN (placeholder_list);
6230 return target;
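/* These two codes cooperate: given, say, an Ada record whose size
   expression refers to a discriminant, the size tree contains a
   PLACEHOLDER_EXPR standing for "the record object at hand", and a
   wrapper of the form

	WITH_RECORD_EXPR <size-expr, the-record-object>

   pushes that object on placeholder_list while size-expr is expanded,
   so the PLACEHOLDER_EXPR case above can find an object (or a pointer
   to one) of the type it needs.  */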
6232 case GOTO_EXPR:
6233 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6234 expand_goto (TREE_OPERAND (exp, 0));
6235 else
6236 expand_computed_goto (TREE_OPERAND (exp, 0));
6237 return const0_rtx;
6239 case EXIT_EXPR:
6240 expand_exit_loop_if_false (NULL_PTR,
6241 invert_truthvalue (TREE_OPERAND (exp, 0)));
6242 return const0_rtx;
6244 case LABELED_BLOCK_EXPR:
6245 if (LABELED_BLOCK_BODY (exp))
6246 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6247 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6248 return const0_rtx;
6250 case EXIT_BLOCK_EXPR:
6251 if (EXIT_BLOCK_RETURN (exp))
6252 sorry ("returned value in block_exit_expr");
6253 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6254 return const0_rtx;
6256 case LOOP_EXPR:
6257 push_temp_slots ();
6258 expand_start_loop (1);
6259 expand_expr_stmt (TREE_OPERAND (exp, 0));
6260 expand_end_loop ();
6261 pop_temp_slots ();
6263 return const0_rtx;
6265 case BIND_EXPR:
6267 tree vars = TREE_OPERAND (exp, 0);
6268 int vars_need_expansion = 0;
6270 /* Need to open a binding contour here because
6271 if there are any cleanups they must be contained here. */
6272 expand_start_bindings (2);
6274 /* Mark the corresponding BLOCK for output in its proper place. */
6275 if (TREE_OPERAND (exp, 2) != 0
6276 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6277 insert_block (TREE_OPERAND (exp, 2));
6279 /* If VARS have not yet been expanded, expand them now. */
6280 while (vars)
6282 if (DECL_RTL (vars) == 0)
6284 vars_need_expansion = 1;
6285 expand_decl (vars);
6287 expand_decl_init (vars);
6288 vars = TREE_CHAIN (vars);
6291 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6293 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6295 return temp;
6298 case RTL_EXPR:
6299 if (RTL_EXPR_SEQUENCE (exp))
6301 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6302 abort ();
6303 emit_insns (RTL_EXPR_SEQUENCE (exp));
6304 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6306 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6307 free_temps_for_rtl_expr (exp);
6308 return RTL_EXPR_RTL (exp);
6310 case CONSTRUCTOR:
6311 /* If we don't need the result, just ensure we evaluate any
6312 subexpressions. */
6313 if (ignore)
6315 tree elt;
6316 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6317 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6318 EXPAND_MEMORY_USE_BAD);
6319 return const0_rtx;
6322 /* All elts simple constants => refer to a constant in memory. But
6323 if this is a non-BLKmode mode, let it store a field at a time
6324 since that should make a CONST_INT or CONST_DOUBLE when we
6325 fold. Likewise, if we have a target we can use, it is best to
6326 store directly into the target unless the type is large enough
6327 that memcpy will be used. If we are making an initializer and
6328 all operands are constant, put it in memory as well. */
6329 else if ((TREE_STATIC (exp)
6330 && ((mode == BLKmode
6331 && ! (target != 0 && safe_from_p (target, exp, 1)))
6332 || TREE_ADDRESSABLE (exp)
6333 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6334 && (!MOVE_BY_PIECES_P
6335 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6336 TYPE_ALIGN (type) / BITS_PER_UNIT))
6337 && ! mostly_zeros_p (exp))))
6338 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6340 rtx constructor = output_constant_def (exp);
6341 if (modifier != EXPAND_CONST_ADDRESS
6342 && modifier != EXPAND_INITIALIZER
6343 && modifier != EXPAND_SUM
6344 && (! memory_address_p (GET_MODE (constructor),
6345 XEXP (constructor, 0))
6346 || (flag_force_addr
6347 && GET_CODE (XEXP (constructor, 0)) != REG)))
6348 constructor = change_address (constructor, VOIDmode,
6349 XEXP (constructor, 0));
6350 return constructor;
6353 else
6355 /* Handle calls that pass values in multiple non-contiguous
6356 locations. The Irix 6 ABI has examples of this. */
6357 if (target == 0 || ! safe_from_p (target, exp, 1)
6358 || GET_CODE (target) == PARALLEL)
6360 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6361 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6362 else
6363 target = assign_temp (type, 0, 1, 1);
6366 if (TREE_READONLY (exp))
6368 if (GET_CODE (target) == MEM)
6369 target = copy_rtx (target);
6371 RTX_UNCHANGING_P (target) = 1;
6374 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6375 int_size_in_bytes (TREE_TYPE (exp)));
6376 return target;
6379 case INDIRECT_REF:
6381 tree exp1 = TREE_OPERAND (exp, 0);
6382 tree exp2;
6383 tree index;
6384 tree string = string_constant (exp1, &index);
6385 int i;
6387 /* Try to optimize reads from const strings. */
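/* For instance, a read such as

	c = *("abc" + 1);

   passes the test below (constant string, constant in-range index,
   one-byte integer mode), so it simply folds to the character 'b'
   and no memory reference is emitted.  */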
6388 if (string
6389 && TREE_CODE (string) == STRING_CST
6390 && TREE_CODE (index) == INTEGER_CST
6391 && !TREE_INT_CST_HIGH (index)
6392 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6393 && GET_MODE_CLASS (mode) == MODE_INT
6394 && GET_MODE_SIZE (mode) == 1
6395 && modifier != EXPAND_MEMORY_USE_WO)
6396 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6398 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6399 op0 = memory_address (mode, op0);
6401 if (cfun && current_function_check_memory_usage
6402 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6404 enum memory_use_mode memory_usage;
6405 memory_usage = get_memory_usage_from_modifier (modifier);
6407 if (memory_usage != MEMORY_USE_DONT)
6409 in_check_memory_usage = 1;
6410 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6411 op0, Pmode,
6412 GEN_INT (int_size_in_bytes (type)),
6413 TYPE_MODE (sizetype),
6414 GEN_INT (memory_usage),
6415 TYPE_MODE (integer_type_node));
6416 in_check_memory_usage = 0;
6420 temp = gen_rtx_MEM (mode, op0);
6421 /* If address was computed by addition,
6422 mark this as an element of an aggregate. */
6423 if (TREE_CODE (exp1) == PLUS_EXPR
6424 || (TREE_CODE (exp1) == SAVE_EXPR
6425 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6426 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6427 || (TREE_CODE (exp1) == ADDR_EXPR
6428 && (exp2 = TREE_OPERAND (exp1, 0))
6429 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6430 MEM_SET_IN_STRUCT_P (temp, 1);
6432 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6433 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6435 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6436 here, because, in C and C++, the fact that a location is accessed
6437 through a pointer to const does not mean that the value there can
6438 never change. Languages where it can never change should
6439 also set TREE_STATIC. */
6440 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6442 /* If we are writing to this object and its type is a record with
6443 readonly fields, we must mark it as readonly so it will
6444 conflict with readonly references to those fields. */
6445 if (modifier == EXPAND_MEMORY_USE_WO
6446 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6447 RTX_UNCHANGING_P (temp) = 1;
6449 return temp;
6452 case ARRAY_REF:
6453 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6454 abort ();
6457 tree array = TREE_OPERAND (exp, 0);
6458 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6459 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6460 tree index = TREE_OPERAND (exp, 1);
6461 tree index_type = TREE_TYPE (index);
6462 HOST_WIDE_INT i;
6464 /* Optimize the special-case of a zero lower bound.
6466 We convert the low_bound to sizetype to avoid some problems
6467 with constant folding. (E.g. suppose the lower bound is 1,
6468 and its mode is QI. Without the conversion, (ARRAY
6469 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6470 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6472 But sizetype isn't quite right either (especially if
6473 the lowbound is negative). FIXME */
6475 if (! integer_zerop (low_bound))
6476 index = fold (build (MINUS_EXPR, index_type, index,
6477 convert (sizetype, low_bound)));
6479 /* Fold an expression like: "foo"[2].
6480 This is not done in fold so it won't happen inside &.
6481 Don't fold if this is for wide characters since it's too
6482 difficult to do correctly and this is a very rare case. */
6484 if (TREE_CODE (array) == STRING_CST
6485 && TREE_CODE (index) == INTEGER_CST
6486 && !TREE_INT_CST_HIGH (index)
6487 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6488 && GET_MODE_CLASS (mode) == MODE_INT
6489 && GET_MODE_SIZE (mode) == 1)
6490 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6492 /* If this is a constant index into a constant array,
6493 just get the value from the array. Handle both the cases when
6494 we have an explicit constructor and when our operand is a variable
6495 that was declared const. */
6497 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6499 if (TREE_CODE (index) == INTEGER_CST
6500 && TREE_INT_CST_HIGH (index) == 0)
6502 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6504 i = TREE_INT_CST_LOW (index);
6505 while (elem && i--)
6506 elem = TREE_CHAIN (elem);
6507 if (elem)
6508 return expand_expr (fold (TREE_VALUE (elem)), target,
6509 tmode, ro_modifier);
6513 else if (optimize >= 1
6514 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6515 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6516 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6518 if (TREE_CODE (index) == INTEGER_CST)
6520 tree init = DECL_INITIAL (array);
6522 i = TREE_INT_CST_LOW (index);
6523 if (TREE_CODE (init) == CONSTRUCTOR)
6525 tree elem = CONSTRUCTOR_ELTS (init);
6527 while (elem
6528 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6529 elem = TREE_CHAIN (elem);
6530 if (elem)
6531 return expand_expr (fold (TREE_VALUE (elem)), target,
6532 tmode, ro_modifier);
6534 else if (TREE_CODE (init) == STRING_CST
6535 && TREE_INT_CST_HIGH (index) == 0
6536 && (TREE_INT_CST_LOW (index)
6537 < TREE_STRING_LENGTH (init)))
6538 return (GEN_INT
6539 (TREE_STRING_POINTER
6540 (init)[TREE_INT_CST_LOW (index)]));
6545 /* ... fall through ... */
6547 case COMPONENT_REF:
6548 case BIT_FIELD_REF:
6549 /* If the operand is a CONSTRUCTOR, we can just extract the
6550 appropriate field if it is present. Don't do this if we have
6551 already written the data since we want to refer to that copy
6552 and varasm.c assumes that's what we'll do. */
6553 if (code != ARRAY_REF
6554 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6555 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6557 tree elt;
6559 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6560 elt = TREE_CHAIN (elt))
6561 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6562 /* We can normally use the value of the field in the
6563 CONSTRUCTOR. However, if this is a bitfield in
6564 an integral mode that we can fit in a HOST_WIDE_INT,
6565 we must mask only the number of bits in the bitfield,
6566 since this is done implicitly by the constructor. If
6567 the bitfield does not meet either of those conditions,
6568 we can't do this optimization. */
6569 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6570 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6571 == MODE_INT)
6572 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6573 <= HOST_BITS_PER_WIDE_INT))))
6575 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6576 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6578 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6580 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6582 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6583 op0 = expand_and (op0, op1, target);
6585 else
6587 enum machine_mode imode
6588 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6589 tree count
6590 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6591 0);
6593 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6594 target, 0);
6595 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6596 target, 0);
6600 return op0;
6605 enum machine_mode mode1;
6606 int bitsize;
6607 int bitpos;
6608 tree offset;
6609 int volatilep = 0;
6610 unsigned int alignment;
6611 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6612 &mode1, &unsignedp, &volatilep,
6613 &alignment);
6615 /* If we got back the original object, something is wrong. Perhaps
6616 we are evaluating an expression too early. In any event, don't
6617 infinitely recurse. */
6618 if (tem == exp)
6619 abort ();
6621 /* If TEM's type is a union of variable size, pass TARGET to the inner
6622 computation, since it will need a temporary and TARGET is known
6623 to suffice. This occurs in unchecked conversion in Ada. */
6625 op0 = expand_expr (tem,
6626 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6627 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6628 != INTEGER_CST)
6629 ? target : NULL_RTX),
6630 VOIDmode,
6631 (modifier == EXPAND_INITIALIZER
6632 || modifier == EXPAND_CONST_ADDRESS)
6633 ? modifier : EXPAND_NORMAL);
6635 /* If this is a constant, put it into a register if it is a
6636 legitimate constant and OFFSET is 0; put it into memory if it isn't. */
6637 if (CONSTANT_P (op0))
6639 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6640 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6641 && offset == 0)
6642 op0 = force_reg (mode, op0);
6643 else
6644 op0 = validize_mem (force_const_mem (mode, op0));
6647 if (offset != 0)
6649 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6651 /* If this object is in a register, put it into memory.
6652 This case can't occur in C, but can in Ada if we have
6653 unchecked conversion of an expression from a scalar type to
6654 an array or record type. */
6655 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6656 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6658 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6660 mark_temp_addr_taken (memloc);
6661 emit_move_insn (memloc, op0);
6662 op0 = memloc;
6665 if (GET_CODE (op0) != MEM)
6666 abort ();
6668 if (GET_MODE (offset_rtx) != ptr_mode)
6670 #ifdef POINTERS_EXTEND_UNSIGNED
6671 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6672 #else
6673 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6674 #endif
6677 /* A constant address in OP0 can have VOIDmode; we must not try
6678 to call force_reg in that case, so avoid it. */
6679 if (GET_CODE (op0) == MEM
6680 && GET_MODE (op0) == BLKmode
6681 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6682 && bitsize != 0
6683 && (bitpos % bitsize) == 0
6684 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6685 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6687 rtx temp = change_address (op0, mode1,
6688 plus_constant (XEXP (op0, 0),
6689 (bitpos /
6690 BITS_PER_UNIT)));
6691 if (GET_CODE (XEXP (temp, 0)) == REG)
6692 op0 = temp;
6693 else
6694 op0 = change_address (op0, mode1,
6695 force_reg (GET_MODE (XEXP (temp, 0)),
6696 XEXP (temp, 0)));
6697 bitpos = 0;
6701 op0 = change_address (op0, VOIDmode,
6702 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6703 force_reg (ptr_mode,
6704 offset_rtx)));
6707 /* Don't forget about volatility even if this is a bitfield. */
6708 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6710 op0 = copy_rtx (op0);
6711 MEM_VOLATILE_P (op0) = 1;
6714 /* Check the access. */
6715 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6717 enum memory_use_mode memory_usage;
6718 memory_usage = get_memory_usage_from_modifier (modifier);
6720 if (memory_usage != MEMORY_USE_DONT)
6722 rtx to;
6723 int size;
6725 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6726 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6728 /* Check the access right of the pointer. */
6729 if (size > BITS_PER_UNIT)
6730 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6731 to, Pmode,
6732 GEN_INT (size / BITS_PER_UNIT),
6733 TYPE_MODE (sizetype),
6734 GEN_INT (memory_usage),
6735 TYPE_MODE (integer_type_node));
6739 /* In cases where an aligned union has an unaligned object
6740 as a field, we might be extracting a BLKmode value from
6741 an integer-mode (e.g., SImode) object. Handle this case
6742 by doing the extract into an object as wide as the field
6743 (which we know to be the width of a basic mode), then
6744 storing into memory, and changing the mode to BLKmode.
6745 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6746 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6747 if (mode1 == VOIDmode
6748 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6749 || (modifier != EXPAND_CONST_ADDRESS
6750 && modifier != EXPAND_INITIALIZER
6751 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6752 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6753 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6754 /* If the field isn't aligned enough to fetch as a memref,
6755 fetch it as a bit field. */
6756 || (mode1 != BLKmode
6757 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6758 && ((TYPE_ALIGN (TREE_TYPE (tem))
6759 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6760 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6761 || (modifier != EXPAND_CONST_ADDRESS
6762 && modifier != EXPAND_INITIALIZER
6763 && mode == BLKmode
6764 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6765 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6766 || bitpos % TYPE_ALIGN (type) != 0)))
6768 enum machine_mode ext_mode = mode;
6770 if (ext_mode == BLKmode
6771 && ! (target != 0 && GET_CODE (op0) == MEM
6772 && GET_CODE (target) == MEM
6773 && bitpos % BITS_PER_UNIT == 0))
6774 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6776 if (ext_mode == BLKmode)
6778 /* In this case, BITPOS must start at a byte boundary and
6779 TARGET, if specified, must be a MEM. */
6780 if (GET_CODE (op0) != MEM
6781 || (target != 0 && GET_CODE (target) != MEM)
6782 || bitpos % BITS_PER_UNIT != 0)
6783 abort ();
6785 op0 = change_address (op0, VOIDmode,
6786 plus_constant (XEXP (op0, 0),
6787 bitpos / BITS_PER_UNIT));
6788 if (target == 0)
6789 target = assign_temp (type, 0, 1, 1);
6791 emit_block_move (target, op0,
6792 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6793 / BITS_PER_UNIT),
6796 return target;
6799 op0 = validize_mem (op0);
6801 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6802 mark_reg_pointer (XEXP (op0, 0), alignment);
6804 op0 = extract_bit_field (op0, bitsize, bitpos,
6805 unsignedp, target, ext_mode, ext_mode,
6806 alignment,
6807 int_size_in_bytes (TREE_TYPE (tem)));
6809 /* If the result is a record type and BITSIZE is narrower than
6810 the mode of OP0, an integral mode, and this is a big endian
6811 machine, we must put the field into the high-order bits. */
6812 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6813 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6814 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6815 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6816 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6817 - bitsize),
6818 op0, 1);
6820 if (mode == BLKmode)
6822 rtx new = assign_stack_temp (ext_mode,
6823 bitsize / BITS_PER_UNIT, 0);
6825 emit_move_insn (new, op0);
6826 op0 = copy_rtx (new);
6827 PUT_MODE (op0, BLKmode);
6828 MEM_SET_IN_STRUCT_P (op0, 1);
6831 return op0;
6834 /* If the result is BLKmode, use that to access the object
6835 now as well. */
6836 if (mode == BLKmode)
6837 mode1 = BLKmode;
6839 /* Get a reference to just this component. */
6840 if (modifier == EXPAND_CONST_ADDRESS
6841 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6842 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6843 (bitpos / BITS_PER_UNIT)));
6844 else
6845 op0 = change_address (op0, mode1,
6846 plus_constant (XEXP (op0, 0),
6847 (bitpos / BITS_PER_UNIT)));
6849 if (GET_CODE (op0) == MEM)
6850 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6852 if (GET_CODE (XEXP (op0, 0)) == REG)
6853 mark_reg_pointer (XEXP (op0, 0), alignment);
6855 MEM_SET_IN_STRUCT_P (op0, 1);
6856 MEM_VOLATILE_P (op0) |= volatilep;
6857 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6858 || modifier == EXPAND_CONST_ADDRESS
6859 || modifier == EXPAND_INITIALIZER)
6860 return op0;
6861 else if (target == 0)
6862 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6864 convert_move (target, op0, unsignedp);
6865 return target;
6868 /* Intended for a reference to a buffer of a file-object in Pascal.
6869 But it's not certain that a special tree code will really be
6870 necessary for these. INDIRECT_REF might work for them. */
6871 case BUFFER_REF:
6872 abort ();
6874 case IN_EXPR:
6876 /* Pascal set IN expression.
6878 Algorithm:
6879 rlo = set_low - (set_low%bits_per_word);
6880 the_word = set [ (index - rlo)/bits_per_word ];
6881 bit_index = index % bits_per_word;
6882 bitmask = 1 << bit_index;
6883 return !!(the_word & bitmask); */
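/* A rough worked example, assuming 8-bit words and a set whose low
   bound is 3: for index 13,

	rlo       = 3 - (3 % 8)       = 0
	the_word  = set[(13 - 0) / 8] = set[1]
	bit_index = 13 % 8            = 5
	bitmask   = 1 << 5

   so the membership test reads bit 5 of the second byte of the set.  */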
6885 tree set = TREE_OPERAND (exp, 0);
6886 tree index = TREE_OPERAND (exp, 1);
6887 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6888 tree set_type = TREE_TYPE (set);
6889 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6890 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6891 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6892 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6893 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6894 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6895 rtx setaddr = XEXP (setval, 0);
6896 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6897 rtx rlow;
6898 rtx diff, quo, rem, addr, bit, result;
6900 preexpand_calls (exp);
6902 /* If domain is empty, answer is no. Likewise if index is constant
6903 and out of bounds. */
6904 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6905 && TREE_CODE (set_low_bound) == INTEGER_CST
6906 && tree_int_cst_lt (set_high_bound, set_low_bound))
6907 || (TREE_CODE (index) == INTEGER_CST
6908 && TREE_CODE (set_low_bound) == INTEGER_CST
6909 && tree_int_cst_lt (index, set_low_bound))
6910 || (TREE_CODE (set_high_bound) == INTEGER_CST
6911 && TREE_CODE (index) == INTEGER_CST
6912 && tree_int_cst_lt (set_high_bound, index))))
6913 return const0_rtx;
6915 if (target == 0)
6916 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6918 /* If we get here, we have to generate the code for both cases
6919 (in range and out of range). */
6921 op0 = gen_label_rtx ();
6922 op1 = gen_label_rtx ();
6924 if (! (GET_CODE (index_val) == CONST_INT
6925 && GET_CODE (lo_r) == CONST_INT))
6927 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6928 GET_MODE (index_val), iunsignedp, 0, op1);
6931 if (! (GET_CODE (index_val) == CONST_INT
6932 && GET_CODE (hi_r) == CONST_INT))
6934 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6935 GET_MODE (index_val), iunsignedp, 0, op1);
6938 /* Calculate the element number of bit zero in the first word
6939 of the set. */
6940 if (GET_CODE (lo_r) == CONST_INT)
6941 rlow = GEN_INT (INTVAL (lo_r)
6942 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6943 else
6944 rlow = expand_binop (index_mode, and_optab, lo_r,
6945 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6946 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6948 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6949 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6951 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6952 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6953 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6954 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6956 addr = memory_address (byte_mode,
6957 expand_binop (index_mode, add_optab, diff,
6958 setaddr, NULL_RTX, iunsignedp,
6959 OPTAB_LIB_WIDEN));
6961 /* Extract the bit we want to examine. */
6962 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6963 gen_rtx_MEM (byte_mode, addr),
6964 make_tree (TREE_TYPE (index), rem),
6965 NULL_RTX, 1);
6966 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6967 GET_MODE (target) == byte_mode ? target : 0,
6968 1, OPTAB_LIB_WIDEN);
6970 if (result != target)
6971 convert_move (target, result, 1);
6973 /* Output the code to handle the out-of-range case. */
6974 emit_jump (op0);
6975 emit_label (op1);
6976 emit_move_insn (target, const0_rtx);
6977 emit_label (op0);
6978 return target;
6981 case WITH_CLEANUP_EXPR:
6982 if (RTL_EXPR_RTL (exp) == 0)
6984 RTL_EXPR_RTL (exp)
6985 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6986 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6988 /* That's it for this cleanup. */
6989 TREE_OPERAND (exp, 2) = 0;
6991 return RTL_EXPR_RTL (exp);
6993 case CLEANUP_POINT_EXPR:
6995 /* Start a new binding layer that will keep track of all cleanup
6996 actions to be performed. */
6997 expand_start_bindings (2);
6999 target_temp_slot_level = temp_slot_level;
7001 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7002 /* If we're going to use this value, load it up now. */
7003 if (! ignore)
7004 op0 = force_not_mem (op0);
7005 preserve_temp_slots (op0);
7006 expand_end_bindings (NULL_TREE, 0, 0);
7008 return op0;
7010 case CALL_EXPR:
7011 /* Check for a built-in function. */
7012 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7013 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7014 == FUNCTION_DECL)
7015 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7016 return expand_builtin (exp, target, subtarget, tmode, ignore);
7018 /* If this call was expanded already by preexpand_calls,
7019 just return the result we got. */
7020 if (CALL_EXPR_RTL (exp) != 0)
7021 return CALL_EXPR_RTL (exp);
7023 return expand_call (exp, target, ignore);
7025 case NON_LVALUE_EXPR:
7026 case NOP_EXPR:
7027 case CONVERT_EXPR:
7028 case REFERENCE_EXPR:
7029 if (TREE_CODE (type) == UNION_TYPE)
7031 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7033 /* If both input and output are BLKmode, this conversion
7034 isn't actually doing anything unless we need to make the
7035 alignment stricter. */
7036 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7037 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7038 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7039 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7040 modifier);
7042 if (target == 0)
7044 if (mode != BLKmode)
7045 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7046 else
7047 target = assign_temp (type, 0, 1, 1);
7050 if (GET_CODE (target) == MEM)
7051 /* Store data into beginning of memory target. */
7052 store_expr (TREE_OPERAND (exp, 0),
7053 change_address (target, TYPE_MODE (valtype), 0), 0);
7055 else if (GET_CODE (target) == REG)
7056 /* Store this field into a union of the proper type. */
7057 store_field (target,
7058 MIN ((int_size_in_bytes (TREE_TYPE
7059 (TREE_OPERAND (exp, 0)))
7060 * BITS_PER_UNIT),
7061 GET_MODE_BITSIZE (mode)),
7062 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7063 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7064 else
7065 abort ();
7067 /* Return the entire union. */
7068 return target;
7071 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7073 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7074 ro_modifier);
7076 /* If the signedness of the conversion differs and OP0 is
7077 a promoted SUBREG, clear that indication since we now
7078 have to do the proper extension. */
7079 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7080 && GET_CODE (op0) == SUBREG)
7081 SUBREG_PROMOTED_VAR_P (op0) = 0;
7083 return op0;
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7087 if (GET_MODE (op0) == mode)
7088 return op0;
7090 /* If OP0 is a constant, just convert it into the proper mode. */
7091 if (CONSTANT_P (op0))
7092 return
7093 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7094 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7096 if (modifier == EXPAND_INITIALIZER)
7097 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7099 if (target == 0)
7100 return
7101 convert_to_mode (mode, op0,
7102 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7103 else
7104 convert_move (target, op0,
7105 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7106 return target;
7108 case PLUS_EXPR:
7109 /* We come here from MINUS_EXPR when the second operand is a
7110 constant. */
7111 plus_expr:
7112 this_optab = add_optab;
7114 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7115 something else, make sure we add the register to the constant and
7116 then to the other thing. This case can occur during strength
7117 reduction and doing it this way will produce better code if the
7118 frame pointer or argument pointer is eliminated.
7120 fold-const.c will ensure that the constant is always in the inner
7121 PLUS_EXPR, so the only case we need to do anything about is if
7122 sp, ap, or fp is our second argument, in which case we must swap
7123 the innermost first argument and our second argument. */
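/* Concretely, if strength reduction hands us something like

	(PLUS (PLUS x 12) fp)

   where FP is the frame pointer wrapped in an RTL_EXPR, the swap
   below turns it into

	(PLUS (PLUS fp 12) x)

   so the register-plus-constant part can fold away once the frame
   pointer is eliminated.  */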
7125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7126 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7127 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7128 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7129 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7130 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7132 tree t = TREE_OPERAND (exp, 1);
7134 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7135 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7138 /* If the result is to be ptr_mode and we are adding an integer to
7139 something, we might be forming a constant. So try to use
7140 plus_constant. If it produces a sum and we can't accept it,
7141 use force_operand. This allows P = &ARR[const] to generate
7142 efficient code on machines where a SYMBOL_REF is not a valid
7143 address.
7145 If this is an EXPAND_SUM call, always return the sum. */
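/* For example, expanding &arr[10] for a 4-byte element type under
   EXPAND_SUM may simply yield

	(plus (symbol_ref "arr") (const_int 40))

   which the caller can use directly as an address, instead of first
   loading the symbol into a register and then adding 40 to it.  */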
7146 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7147 || mode == ptr_mode)
7149 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7150 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7151 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7153 rtx constant_part;
7155 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7156 EXPAND_SUM);
7157 /* Use immed_double_const to ensure that the constant is
7158 truncated according to the mode of OP1, then sign extended
7159 to a HOST_WIDE_INT. Using the constant directly can result
7160 in non-canonical RTL in a 64x32 cross compile. */
7161 constant_part
7162 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7163 (HOST_WIDE_INT) 0,
7164 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7165 op1 = plus_constant (op1, INTVAL (constant_part));
7166 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7167 op1 = force_operand (op1, target);
7168 return op1;
7171 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7172 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7173 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7175 rtx constant_part;
7177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7178 EXPAND_SUM);
7179 if (! CONSTANT_P (op0))
7181 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7182 VOIDmode, modifier);
7183 /* Don't go to both_summands if modifier
7184 says it's not right to return a PLUS. */
7185 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7186 goto binop2;
7187 goto both_summands;
7189 /* Use immed_double_const to ensure that the constant is
7190 truncated according to the mode of OP1, then sign extended
7191 to a HOST_WIDE_INT. Using the constant directly can result
7192 in non-canonical RTL in a 64x32 cross compile. */
7193 constant_part
7194 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7195 (HOST_WIDE_INT) 0,
7196 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7197 op0 = plus_constant (op0, INTVAL (constant_part));
7198 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7199 op0 = force_operand (op0, target);
7200 return op0;
7204 /* No sense saving up arithmetic to be done
7205 if it's all in the wrong mode to form part of an address.
7206 And force_operand won't know whether to sign-extend or
7207 zero-extend. */
7208 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7209 || mode != ptr_mode)
7210 goto binop;
7212 preexpand_calls (exp);
7213 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7214 subtarget = 0;
7216 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7219 both_summands:
7220 /* Make sure any term that's a sum with a constant comes last. */
7221 if (GET_CODE (op0) == PLUS
7222 && CONSTANT_P (XEXP (op0, 1)))
7224 temp = op0;
7225 op0 = op1;
7226 op1 = temp;
7228 /* If adding to a sum including a constant,
7229 associate it to put the constant outside. */
7230 if (GET_CODE (op1) == PLUS
7231 && CONSTANT_P (XEXP (op1, 1)))
7233 rtx constant_term = const0_rtx;
7235 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7236 if (temp != 0)
7237 op0 = temp;
7238 /* Ensure that MULT comes first if there is one. */
7239 else if (GET_CODE (op0) == MULT)
7240 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7241 else
7242 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7244 /* Let's also eliminate constants from op0 if possible. */
7245 op0 = eliminate_constant_term (op0, &constant_term);
7247 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7248 their sum should be a constant. Form it into OP1, since the
7249 result we want will then be OP0 + OP1. */
7251 temp = simplify_binary_operation (PLUS, mode, constant_term,
7252 XEXP (op1, 1));
7253 if (temp != 0)
7254 op1 = temp;
7255 else
7256 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7259 /* Put a constant term last and put a multiplication first. */
7260 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7261 temp = op1, op1 = op0, op0 = temp;
7263 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7264 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
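/* The reassociation above keeps sums in a canonical shape: given,
   say, OP0 = (plus (reg A) (const_int 4)) and
   OP1 = (plus (reg B) (const_int 8)), the constants are pulled out
   and recombined so the result is roughly

	(plus (plus (reg A) (reg B)) (const_int 12))

   with any MULT term first and the constant term last, which is the
   form the address-recognition machinery expects.  */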
7266 case MINUS_EXPR:
7267 /* For initializers, we are allowed to return a MINUS of two
7268 symbolic constants. Here we handle all cases when both operands
7269 are constant. */
7270 /* Handle difference of two symbolic constants,
7271 for the sake of an initializer. */
7272 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7273 && really_constant_p (TREE_OPERAND (exp, 0))
7274 && really_constant_p (TREE_OPERAND (exp, 1)))
7276 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7277 VOIDmode, ro_modifier);
7278 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7279 VOIDmode, ro_modifier);
7281 /* If the last operand is a CONST_INT, use plus_constant of
7282 the negated constant. Else make the MINUS. */
7283 if (GET_CODE (op1) == CONST_INT)
7284 return plus_constant (op0, - INTVAL (op1));
7285 else
7286 return gen_rtx_MINUS (mode, op0, op1);
7288 /* Convert A - const to A + (-const). */
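/* For example, `p - 8' is rewritten as `p + (-8)' so that the PLUS_EXPR
   code above (and plus_constant) can handle it. */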
7289 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7291 tree negated = fold (build1 (NEGATE_EXPR, type,
7292 TREE_OPERAND (exp, 1)));
7294 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7295 /* If we can't negate the constant in TYPE, leave it alone and
7296 expand_binop will negate it for us. We used to try to do it
7297 here in the signed version of TYPE, but that doesn't work
7298 on POINTER_TYPEs. */;
7299 else
7301 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7302 goto plus_expr;
7305 this_optab = sub_optab;
7306 goto binop;
7308 case MULT_EXPR:
7309 preexpand_calls (exp);
7310 /* If first operand is constant, swap them.
7311 Thus the following special case checks need only
7312 check the second operand. */
7313 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7315 register tree t1 = TREE_OPERAND (exp, 0);
7316 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7317 TREE_OPERAND (exp, 1) = t1;
7320 /* Attempt to return something suitable for generating an
7321 indexed address, for machines that support that. */
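/* For example, when i * 4 is part of an address, returning the unreduced
   (mult (reg i) (const_int 4)) lets the caller fold it into an indexed
   address such as (plus (reg base) (mult (reg i) (const_int 4))) on
   machines that have such addressing modes. */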
7323 if (modifier == EXPAND_SUM && mode == ptr_mode
7324 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7328 EXPAND_SUM);
7330 /* Apply distributive law if OP0 is x+c. */
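/* For example, (X + 3) * 4 becomes (X * 4) + 12, keeping the constant
   at the top level where it can be combined into an address. */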
7331 if (GET_CODE (op0) == PLUS
7332 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7333 return
7334 gen_rtx_PLUS
7335 (mode,
7336 gen_rtx_MULT
7337 (mode, XEXP (op0, 0),
7338 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7339 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7340 * INTVAL (XEXP (op0, 1))));
7342 if (GET_CODE (op0) != REG)
7343 op0 = force_operand (op0, NULL_RTX);
7344 if (GET_CODE (op0) != REG)
7345 op0 = copy_to_mode_reg (mode, op0);
7347 return
7348 gen_rtx_MULT (mode, op0,
7349 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7352 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7353 subtarget = 0;
7355 /* Check for multiplying things that have been extended
7356 from a narrower type. If this machine supports multiplying
7357 in that narrower type with a result in the desired type,
7358 do it that way, and avoid the explicit type-conversion. */
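/* For example, (int) s1 * (int) s2 with short operands can use a
   HImode x HImode -> SImode widening multiply, when the target provides
   one, instead of sign-extending both operands and doing a full SImode
   multiply. */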
7359 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7360 && TREE_CODE (type) == INTEGER_TYPE
7361 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7362 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7363 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7364 && int_fits_type_p (TREE_OPERAND (exp, 1),
7365 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7366 /* Don't use a widening multiply if a shift will do. */
7367 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7368 > HOST_BITS_PER_WIDE_INT)
7369 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7370 ||
7371 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7372 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7373 ==
7374 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7375 /* If both operands are extended, they must either both
7376 be zero-extended or both be sign-extended. */
7377 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7378 ==
7379 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))
7381 enum machine_mode innermode
7382 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7383 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7384 ? smul_widen_optab : umul_widen_optab);
7385 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7386 ? umul_widen_optab : smul_widen_optab);
7387 if (mode == GET_MODE_WIDER_MODE (innermode))
7389 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7391 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7392 NULL_RTX, VOIDmode, 0);
7393 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7394 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7395 VOIDmode, 0);
7396 else
7397 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7398 NULL_RTX, VOIDmode, 0);
7399 goto binop2;
7401 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7402 && innermode == word_mode)
7404 rtx htem;
7405 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7406 NULL_RTX, VOIDmode, 0);
7407 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7408 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7409 VOIDmode, 0);
7410 else
7411 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7412 NULL_RTX, VOIDmode, 0);
7413 temp = expand_binop (mode, other_optab, op0, op1, target,
7414 unsignedp, OPTAB_LIB_WIDEN);
7415 htem = expand_mult_highpart_adjust (innermode,
7416 gen_highpart (innermode, temp),
7417 op0, op1,
7418 gen_highpart (innermode, temp),
7419 unsignedp);
7420 emit_move_insn (gen_highpart (innermode, temp), htem);
7421 return temp;
7425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7427 return expand_mult (mode, op0, op1, target, unsignedp);
7429 case TRUNC_DIV_EXPR:
7430 case FLOOR_DIV_EXPR:
7431 case CEIL_DIV_EXPR:
7432 case ROUND_DIV_EXPR:
7433 case EXACT_DIV_EXPR:
7434 preexpand_calls (exp);
7435 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7436 subtarget = 0;
7437 /* Possible optimization: compute the dividend with EXPAND_SUM
7438 then, if the divisor is constant, we can optimize the case
7439 where some terms of the dividend have coefficients divisible by it. */
7440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7441 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7442 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7444 case RDIV_EXPR:
7445 this_optab = flodiv_optab;
7446 goto binop;
7448 case TRUNC_MOD_EXPR:
7449 case FLOOR_MOD_EXPR:
7450 case CEIL_MOD_EXPR:
7451 case ROUND_MOD_EXPR:
7452 preexpand_calls (exp);
7453 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7454 subtarget = 0;
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7456 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7457 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7459 case FIX_ROUND_EXPR:
7460 case FIX_FLOOR_EXPR:
7461 case FIX_CEIL_EXPR:
7462 abort (); /* Not used for C. */
7464 case FIX_TRUNC_EXPR:
7465 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7466 if (target == 0)
7467 target = gen_reg_rtx (mode);
7468 expand_fix (target, op0, unsignedp);
7469 return target;
7471 case FLOAT_EXPR:
7472 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7473 if (target == 0)
7474 target = gen_reg_rtx (mode);
7475 /* expand_float can't figure out what to do if FROM has VOIDmode.
7476 So give it the correct mode. With -O, cse will optimize this. */
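/* A CONST_INT operand, for instance, has VOIDmode, so expand_float
   would have no way to tell how wide the integer being converted is. */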
7477 if (GET_MODE (op0) == VOIDmode)
7478 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7479 op0);
7480 expand_float (target, op0,
7481 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7482 return target;
7484 case NEGATE_EXPR:
7485 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7486 temp = expand_unop (mode, neg_optab, op0, target, 0);
7487 if (temp == 0)
7488 abort ();
7489 return temp;
7491 case ABS_EXPR:
7492 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7494 /* Handle complex values specially. */
7495 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7496 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7497 return expand_complex_abs (mode, op0, target, unsignedp);
7499 /* Unsigned abs is simply the operand. Testing here means we don't
7500 risk generating incorrect code below. */
7501 if (TREE_UNSIGNED (type))
7502 return op0;
7504 return expand_abs (mode, op0, target,
7505 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7507 case MAX_EXPR:
7508 case MIN_EXPR:
7509 target = original_target;
7510 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7511 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7512 || GET_MODE (target) != mode
7513 || (GET_CODE (target) == REG
7514 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7515 target = gen_reg_rtx (mode);
7516 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7517 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7519 /* First try to do it with a special MIN or MAX instruction.
7520 If that does not win, use a conditional jump to select the proper
7521 value. */
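/* For MAX_EXPR the fallback sequence is roughly
   target = op0; if (target >= op1) goto done; target = op1; done:
   and the comparison is <= for MIN_EXPR. */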
7522 this_optab = (TREE_UNSIGNED (type)
7523 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7524 : (code == MIN_EXPR ? smin_optab : smax_optab));
7526 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7527 OPTAB_WIDEN);
7528 if (temp != 0)
7529 return temp;
7531 /* At this point, a MEM target is no longer useful; we will get better
7532 code without it. */
7534 if (GET_CODE (target) == MEM)
7535 target = gen_reg_rtx (mode);
7537 if (target != op0)
7538 emit_move_insn (target, op0);
7540 op0 = gen_label_rtx ();
7542 /* If this mode is an integer too wide to compare properly,
7543 compare word by word. Rely on cse to optimize constant cases. */
7544 if (GET_MODE_CLASS (mode) == MODE_INT
7545 && ! can_compare_p (GE, mode, ccp_jump))
7547 if (code == MAX_EXPR)
7548 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7549 target, op1, NULL_RTX, op0);
7550 else
7551 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7552 op1, target, NULL_RTX, op0);
7554 else
7556 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7557 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7558 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7559 op0);
7561 emit_move_insn (target, op1);
7562 emit_label (op0);
7563 return target;
7565 case BIT_NOT_EXPR:
7566 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7567 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7568 if (temp == 0)
7569 abort ();
7570 return temp;
7572 case FFS_EXPR:
7573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7574 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7575 if (temp == 0)
7576 abort ();
7577 return temp;
7579 /* ??? Can optimize bitwise operations with one arg constant.
7580 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7581 and (a bitwise1 b) bitwise2 b (etc)
7582 but that is probably not worthwhile. */
7584 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7585 boolean values when we want in all cases to compute both of them. In
7586 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7587 as actual zero-or-1 values and then bitwise anding. In cases where
7588 there cannot be any side effects, better code would be made by
7589 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7590 how to recognize those cases. */
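/* For example, with 0-or-1 operands A and B, TRUTH_AND_EXPR computes both
   A and B and bitwise-ANDs them with no branches, whereas TRUTH_ANDIF_EXPR
   (`a && b') short-circuits and never evaluates B when A is zero. */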
7592 case TRUTH_AND_EXPR:
7593 case BIT_AND_EXPR:
7594 this_optab = and_optab;
7595 goto binop;
7597 case TRUTH_OR_EXPR:
7598 case BIT_IOR_EXPR:
7599 this_optab = ior_optab;
7600 goto binop;
7602 case TRUTH_XOR_EXPR:
7603 case BIT_XOR_EXPR:
7604 this_optab = xor_optab;
7605 goto binop;
7607 case LSHIFT_EXPR:
7608 case RSHIFT_EXPR:
7609 case LROTATE_EXPR:
7610 case RROTATE_EXPR:
7611 preexpand_calls (exp);
7612 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7613 subtarget = 0;
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7615 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7616 unsignedp);
7618 /* Could determine the answer when only additive constants differ. Also,
7619 the addition of one can be handled by changing the condition. */
7620 case LT_EXPR:
7621 case LE_EXPR:
7622 case GT_EXPR:
7623 case GE_EXPR:
7624 case EQ_EXPR:
7625 case NE_EXPR:
7626 case UNORDERED_EXPR:
7627 case ORDERED_EXPR:
7628 case UNLT_EXPR:
7629 case UNLE_EXPR:
7630 case UNGT_EXPR:
7631 case UNGE_EXPR:
7632 case UNEQ_EXPR:
7633 preexpand_calls (exp);
7634 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7635 if (temp != 0)
7636 return temp;
7638 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
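/* That is, emit roughly
   temp = foo; if (temp == 0) goto lab; temp = 1; lab:
   with ORIGINAL_TARGET reused as TEMP. */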
7639 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7640 && original_target
7641 && GET_CODE (original_target) == REG
7642 && (GET_MODE (original_target)
7643 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7645 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7646 VOIDmode, 0);
7648 if (temp != original_target)
7649 temp = copy_to_reg (temp);
7651 op1 = gen_label_rtx ();
7652 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7653 GET_MODE (temp), unsignedp, 0, op1);
7654 emit_move_insn (temp, const1_rtx);
7655 emit_label (op1);
7656 return temp;
7659 /* If no set-flag instruction, must generate a conditional
7660 store into a temporary variable. Drop through
7661 and handle this like && and ||. */
7663 case TRUTH_ANDIF_EXPR:
7664 case TRUTH_ORIF_EXPR:
7665 if (! ignore
7666 && (target == 0 || ! safe_from_p (target, exp, 1)
7667 /* Make sure we don't have a hard reg (such as function's return
7668 value) live across basic blocks, if not optimizing. */
7669 || (!optimize && GET_CODE (target) == REG
7670 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7671 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7673 if (target)
7674 emit_clr_insn (target);
7676 op1 = gen_label_rtx ();
7677 jumpifnot (exp, op1);
7679 if (target)
7680 emit_0_to_1_insn (target);
7682 emit_label (op1);
7683 return ignore ? const0_rtx : target;
7685 case TRUTH_NOT_EXPR:
7686 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7687 /* The parser is careful to generate TRUTH_NOT_EXPR
7688 only with operands that are always zero or one. */
7689 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7690 target, 1, OPTAB_LIB_WIDEN);
7691 if (temp == 0)
7692 abort ();
7693 return temp;
7695 case COMPOUND_EXPR:
7696 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7697 emit_queue ();
7698 return expand_expr (TREE_OPERAND (exp, 1),
7699 (ignore ? const0_rtx : target),
7700 VOIDmode, 0);
7702 case COND_EXPR:
7703 /* If we would have a "singleton" (see below) were it not for a
7704 conversion in each arm, bring that conversion back out. */
7705 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7706 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7707 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7708 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7710 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7711 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7713 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7714 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7715 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7716 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7717 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7718 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7719 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7720 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7721 return expand_expr (build1 (NOP_EXPR, type,
7722 build (COND_EXPR, TREE_TYPE (true),
7723 TREE_OPERAND (exp, 0),
7724 true, false)),
7725 target, tmode, modifier);
7729 /* Note that COND_EXPRs whose type is a structure or union
7730 are required to be constructed to contain assignments of
7731 a temporary variable, so that we can evaluate them here
7732 for side effect only. If type is void, we must do likewise. */
7734 /* If an arm of the branch requires a cleanup,
7735 only that cleanup is performed. */
7737 tree singleton = 0;
7738 tree binary_op = 0, unary_op = 0;
7740 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7741 convert it to our mode, if necessary. */
7742 if (integer_onep (TREE_OPERAND (exp, 1))
7743 && integer_zerop (TREE_OPERAND (exp, 2))
7744 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7746 if (ignore)
7748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7749 ro_modifier);
7750 return const0_rtx;
7753 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7754 if (GET_MODE (op0) == mode)
7755 return op0;
7757 if (target == 0)
7758 target = gen_reg_rtx (mode);
7759 convert_move (target, op0, unsignedp);
7760 return target;
7763 /* Check for X ? A + B : A. If we have this, we can copy A to the
7764 output and conditionally add B. Similarly for unary operations.
7765 Don't do this if X has side-effects because those side effects
7766 might affect A or B and the "?" operation is a sequence point in
7767 ANSI. (operand_equal_p tests for side effects.) */
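/* For example, `x ? a + b : a' can be compiled by copying A into the
   result and then adding B only on the path where X is true, instead of
   storing into the result from two separate arms. */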
7769 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7770 && operand_equal_p (TREE_OPERAND (exp, 2),
7771 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7772 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7773 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7774 && operand_equal_p (TREE_OPERAND (exp, 1),
7775 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7776 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7777 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7778 && operand_equal_p (TREE_OPERAND (exp, 2),
7779 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7780 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7781 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7782 && operand_equal_p (TREE_OPERAND (exp, 1),
7783 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7784 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7786 /* If we are not to produce a result, we have no target. Otherwise,
7787 if a target was specified use it; it will not be used as an
7788 intermediate target unless it is safe. If no target, use a
7789 temporary. */
7791 if (ignore)
7792 temp = 0;
7793 else if (original_target
7794 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7795 || (singleton && GET_CODE (original_target) == REG
7796 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7797 && original_target == var_rtx (singleton)))
7798 && GET_MODE (original_target) == mode
7799 #ifdef HAVE_conditional_move
7800 && (! can_conditionally_move_p (mode)
7801 || GET_CODE (original_target) == REG
7802 || TREE_ADDRESSABLE (type))
7803 #endif
7804 && ! (GET_CODE (original_target) == MEM
7805 && MEM_VOLATILE_P (original_target)))
7806 temp = original_target;
7807 else if (TREE_ADDRESSABLE (type))
7808 abort ();
7809 else
7810 temp = assign_temp (type, 0, 0, 1);
7812 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7813 do the test of X as a store-flag operation, do this as
7814 A + ((X != 0) << log C). Similarly for other simple binary
7815 operators. Only do for C == 1 if BRANCH_COST is low. */
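/* For example, `x ? a + 4 : a' can become a + ((x != 0) << 2),
   avoiding any branch. */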
7816 if (temp && singleton && binary_op
7817 && (TREE_CODE (binary_op) == PLUS_EXPR
7818 || TREE_CODE (binary_op) == MINUS_EXPR
7819 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7820 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7821 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7822 : integer_onep (TREE_OPERAND (binary_op, 1)))
7823 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7825 rtx result;
7826 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7827 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7828 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7829 : xor_optab);
7831 /* If we had X ? A : A + 1, do this as A + (X == 0).
7833 We have to invert the truth value here and then put it
7834 back later if do_store_flag fails. We cannot simply copy
7835 TREE_OPERAND (exp, 0) to another variable and modify that
7836 because invert_truthvalue can modify the tree pointed to
7837 by its argument. */
7838 if (singleton == TREE_OPERAND (exp, 1))
7839 TREE_OPERAND (exp, 0)
7840 = invert_truthvalue (TREE_OPERAND (exp, 0));
7842 result = do_store_flag (TREE_OPERAND (exp, 0),
7843 (safe_from_p (temp, singleton, 1)
7844 ? temp : NULL_RTX),
7845 mode, BRANCH_COST <= 1);
7847 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7848 result = expand_shift (LSHIFT_EXPR, mode, result,
7849 build_int_2 (tree_log2
7850 (TREE_OPERAND
7851 (binary_op, 1)),
7852 0),
7853 (safe_from_p (temp, singleton, 1)
7854 ? temp : NULL_RTX), 0);
7856 if (result)
7858 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7859 return expand_binop (mode, boptab, op1, result, temp,
7860 unsignedp, OPTAB_LIB_WIDEN);
7862 else if (singleton == TREE_OPERAND (exp, 1))
7863 TREE_OPERAND (exp, 0)
7864 = invert_truthvalue (TREE_OPERAND (exp, 0));
7867 do_pending_stack_adjust ();
7868 NO_DEFER_POP;
7869 op0 = gen_label_rtx ();
7871 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7873 if (temp != 0)
7875 /* If the target conflicts with the other operand of the
7876 binary op, we can't use it. Also, we can't use the target
7877 if it is a hard register, because evaluating the condition
7878 might clobber it. */
7879 if ((binary_op
7880 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7881 || (GET_CODE (temp) == REG
7882 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7883 temp = gen_reg_rtx (mode);
7884 store_expr (singleton, temp, 0);
7886 else
7887 expand_expr (singleton,
7888 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7889 if (singleton == TREE_OPERAND (exp, 1))
7890 jumpif (TREE_OPERAND (exp, 0), op0);
7891 else
7892 jumpifnot (TREE_OPERAND (exp, 0), op0);
7894 start_cleanup_deferral ();
7895 if (binary_op && temp == 0)
7896 /* Just touch the other operand. */
7897 expand_expr (TREE_OPERAND (binary_op, 1),
7898 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7899 else if (binary_op)
7900 store_expr (build (TREE_CODE (binary_op), type,
7901 make_tree (type, temp),
7902 TREE_OPERAND (binary_op, 1)),
7903 temp, 0);
7904 else
7905 store_expr (build1 (TREE_CODE (unary_op), type,
7906 make_tree (type, temp)),
7907 temp, 0);
7908 op1 = op0;
7910 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7911 comparison operator. If we have one of these cases, set the
7912 output to A, branch on A (cse will merge these two references),
7913 then set the output to FOO. */
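/* For example, for `a > 0 ? a : b' we emit
   temp = a; if (a > 0) goto lab; temp = b; lab:
   so cse can merge the two references to A. */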
7914 else if (temp
7915 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7916 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7917 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7918 TREE_OPERAND (exp, 1), 0)
7919 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7920 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7921 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7923 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7924 temp = gen_reg_rtx (mode);
7925 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7926 jumpif (TREE_OPERAND (exp, 0), op0);
7928 start_cleanup_deferral ();
7929 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7930 op1 = op0;
7932 else if (temp
7933 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7934 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7935 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7936 TREE_OPERAND (exp, 2), 0)
7937 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7938 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7939 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7941 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7942 temp = gen_reg_rtx (mode);
7943 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7944 jumpifnot (TREE_OPERAND (exp, 0), op0);
7946 start_cleanup_deferral ();
7947 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7948 op1 = op0;
7950 else
7952 op1 = gen_label_rtx ();
7953 jumpifnot (TREE_OPERAND (exp, 0), op0);
7955 start_cleanup_deferral ();
7957 /* One branch of the cond can be void, if it never returns. For
7958 example A ? throw : E */
7959 if (temp != 0
7960 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7961 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7962 else
7963 expand_expr (TREE_OPERAND (exp, 1),
7964 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7965 end_cleanup_deferral ();
7966 emit_queue ();
7967 emit_jump_insn (gen_jump (op1));
7968 emit_barrier ();
7969 emit_label (op0);
7970 start_cleanup_deferral ();
7971 if (temp != 0
7972 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7973 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7974 else
7975 expand_expr (TREE_OPERAND (exp, 2),
7976 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7979 end_cleanup_deferral ();
7981 emit_queue ();
7982 emit_label (op1);
7983 OK_DEFER_POP;
7985 return temp;
7988 case TARGET_EXPR:
7990 /* Something needs to be initialized, but we didn't know
7991 where that thing was when building the tree. For example,
7992 it could be the return value of a function, or a parameter
7993 to a function which is laid out on the stack, or a temporary
7994 variable which must be passed by reference.
7996 We guarantee that the expression will either be constructed
7997 or copied into our original target. */
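/* A typical case is a C++ declaration such as `T x = f ();', where the
   value returned by f can be constructed directly in x's slot instead of
   in a separate temporary that is then copied. */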
7999 tree slot = TREE_OPERAND (exp, 0);
8000 tree cleanups = NULL_TREE;
8001 tree exp1;
8003 if (TREE_CODE (slot) != VAR_DECL)
8004 abort ();
8006 if (! ignore)
8007 target = original_target;
8009 /* Set this here so that if we get a target that refers to a
8010 register variable that's already been used, put_reg_into_stack
8011 knows that it should fix up those uses. */
8012 TREE_USED (slot) = 1;
8014 if (target == 0)
8016 if (DECL_RTL (slot) != 0)
8018 target = DECL_RTL (slot);
8019 /* If we have already expanded the slot, don't do
8020 it again. (mrs) */
8021 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8022 return target;
8024 else
8026 target = assign_temp (type, 2, 0, 1);
8027 /* All temp slots at this level must not conflict. */
8028 preserve_temp_slots (target);
8029 DECL_RTL (slot) = target;
8030 if (TREE_ADDRESSABLE (slot))
8032 TREE_ADDRESSABLE (slot) = 0;
8033 mark_addressable (slot);
8036 /* Since SLOT is not known to the called function
8037 to belong to its stack frame, we must build an explicit
8038 cleanup. This case occurs when we must build up a reference
8039 to pass the reference as an argument. In this case,
8040 it is very likely that such a reference need not be
8041 built here. */
8043 if (TREE_OPERAND (exp, 2) == 0)
8044 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8045 cleanups = TREE_OPERAND (exp, 2);
8048 else
8050 /* This case does occur when expanding a parameter which
8051 needs to be constructed on the stack. The target
8052 is the actual stack address that we want to initialize.
8053 The function we call will perform the cleanup in this case. */
8055 /* If we have already assigned it space, use that space,
8056 not the target that we were passed in, as our target
8057 parameter is only a hint. */
8058 if (DECL_RTL (slot) != 0)
8060 target = DECL_RTL (slot);
8061 /* If we have already expanded the slot, don't do
8062 it again. (mrs) */
8063 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8064 return target;
8066 else
8068 DECL_RTL (slot) = target;
8069 /* If we must have an addressable slot, then make sure that
8070 the RTL that we just stored in slot is OK. */
8071 if (TREE_ADDRESSABLE (slot))
8073 TREE_ADDRESSABLE (slot) = 0;
8074 mark_addressable (slot);
8079 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8080 /* Mark it as expanded. */
8081 TREE_OPERAND (exp, 1) = NULL_TREE;
8083 store_expr (exp1, target, 0);
8085 expand_decl_cleanup (NULL_TREE, cleanups);
8087 return target;
8090 case INIT_EXPR:
8092 tree lhs = TREE_OPERAND (exp, 0);
8093 tree rhs = TREE_OPERAND (exp, 1);
8094 tree noncopied_parts = 0;
8095 tree lhs_type = TREE_TYPE (lhs);
8097 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8098 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8099 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8100 TYPE_NONCOPIED_PARTS (lhs_type));
8101 while (noncopied_parts != 0)
8103 expand_assignment (TREE_VALUE (noncopied_parts),
8104 TREE_PURPOSE (noncopied_parts), 0, 0);
8105 noncopied_parts = TREE_CHAIN (noncopied_parts);
8107 return temp;
8110 case MODIFY_EXPR:
8112 /* If lhs is complex, expand calls in rhs before computing it.
8113 That's so we don't compute a pointer and save it over a call.
8114 If lhs is simple, compute it first so we can give it as a
8115 target if the rhs is just a call. This avoids an extra temp and copy
8116 and that prevents a partial-subsumption which makes bad code.
8117 Actually we could treat component_ref's of vars like vars. */
8119 tree lhs = TREE_OPERAND (exp, 0);
8120 tree rhs = TREE_OPERAND (exp, 1);
8121 tree noncopied_parts = 0;
8122 tree lhs_type = TREE_TYPE (lhs);
8124 temp = 0;
8126 if (TREE_CODE (lhs) != VAR_DECL
8127 && TREE_CODE (lhs) != RESULT_DECL
8128 && TREE_CODE (lhs) != PARM_DECL
8129 && ! (TREE_CODE (lhs) == INDIRECT_REF
8130 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8131 preexpand_calls (exp);
8133 /* Check for |= or &= of a bitfield of size one into another bitfield
8134 of size 1. In this case, (unless we need the result of the
8135 assignment) we can do this more efficiently with a
8136 test followed by an assignment, if necessary.
8138 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8139 things change so we do, this code should be enhanced to
8140 support it. */
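/* For example, with 1-bit fields, `x.a |= x.b' becomes
   `if (x.b) x.a = 1;' and `x.a &= x.b' becomes `if (! x.b) x.a = 0;'. */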
8141 if (ignore
8142 && TREE_CODE (lhs) == COMPONENT_REF
8143 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8144 || TREE_CODE (rhs) == BIT_AND_EXPR)
8145 && TREE_OPERAND (rhs, 0) == lhs
8146 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8147 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8148 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8150 rtx label = gen_label_rtx ();
8152 do_jump (TREE_OPERAND (rhs, 1),
8153 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8154 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8155 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8156 (TREE_CODE (rhs) == BIT_IOR_EXPR
8157 ? integer_one_node
8158 : integer_zero_node)),
8159 0, 0);
8160 do_pending_stack_adjust ();
8161 emit_label (label);
8162 return const0_rtx;
8165 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8166 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8167 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8168 TYPE_NONCOPIED_PARTS (lhs_type));
8170 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8171 while (noncopied_parts != 0)
8173 expand_assignment (TREE_PURPOSE (noncopied_parts),
8174 TREE_VALUE (noncopied_parts), 0, 0);
8175 noncopied_parts = TREE_CHAIN (noncopied_parts);
8177 return temp;
8180 case RETURN_EXPR:
8181 if (!TREE_OPERAND (exp, 0))
8182 expand_null_return ();
8183 else
8184 expand_return (TREE_OPERAND (exp, 0));
8185 return const0_rtx;
8187 case PREINCREMENT_EXPR:
8188 case PREDECREMENT_EXPR:
8189 return expand_increment (exp, 0, ignore);
8191 case POSTINCREMENT_EXPR:
8192 case POSTDECREMENT_EXPR:
8193 /* Faster to treat as pre-increment if result is not used. */
8194 return expand_increment (exp, ! ignore, ignore);
8196 case ADDR_EXPR:
8197 /* If nonzero, TEMP will be set to the address of something that might
8198 be a MEM corresponding to a stack slot. */
8199 temp = 0;
8201 /* Are we taking the address of a nested function? */
8202 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8203 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8204 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8205 && ! TREE_STATIC (exp))
8207 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8208 op0 = force_operand (op0, target);
8210 /* If we are taking the address of something erroneous, just
8211 return a zero. */
8212 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8213 return const0_rtx;
8214 else
8216 /* We make sure to pass const0_rtx down if we came in with
8217 ignore set, to avoid doing the cleanups twice for something. */
8218 op0 = expand_expr (TREE_OPERAND (exp, 0),
8219 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8220 (modifier == EXPAND_INITIALIZER
8221 ? modifier : EXPAND_CONST_ADDRESS));
8223 /* If we are going to ignore the result, OP0 will have been set
8224 to const0_rtx, so just return it. Don't get confused and
8225 think we are taking the address of the constant. */
8226 if (ignore)
8227 return op0;
8229 op0 = protect_from_queue (op0, 0);
8231 /* We would like the object in memory. If it is a constant, we can
8232 have it be statically allocated into memory. For a non-constant,
8233 we need to allocate some memory and store the value into it. */
8235 if (CONSTANT_P (op0))
8236 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8237 op0);
8238 else if (GET_CODE (op0) == MEM)
8240 mark_temp_addr_taken (op0);
8241 temp = XEXP (op0, 0);
8244 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8245 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8247 /* If this object is in a register, it must not
8248 be BLKmode. */
8249 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8250 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8252 mark_temp_addr_taken (memloc);
8253 emit_move_insn (memloc, op0);
8254 op0 = memloc;
8257 if (GET_CODE (op0) != MEM)
8258 abort ();
8260 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8262 temp = XEXP (op0, 0);
8263 #ifdef POINTERS_EXTEND_UNSIGNED
8264 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8265 && mode == ptr_mode)
8266 temp = convert_memory_address (ptr_mode, temp);
8267 #endif
8268 return temp;
8271 op0 = force_operand (XEXP (op0, 0), target);
8274 if (flag_force_addr && GET_CODE (op0) != REG)
8275 op0 = force_reg (Pmode, op0);
8277 if (GET_CODE (op0) == REG
8278 && ! REG_USERVAR_P (op0))
8279 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8281 /* If we might have had a temp slot, add an equivalent address
8282 for it. */
8283 if (temp != 0)
8284 update_temp_slot_address (temp, op0);
8286 #ifdef POINTERS_EXTEND_UNSIGNED
8287 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8288 && mode == ptr_mode)
8289 op0 = convert_memory_address (ptr_mode, op0);
8290 #endif
8292 return op0;
8294 case ENTRY_VALUE_EXPR:
8295 abort ();
8297 /* COMPLEX type for Extended Pascal & Fortran */
8298 case COMPLEX_EXPR:
8300 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8301 rtx insns;
8303 /* Get the rtx code of the operands. */
8304 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8305 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8307 if (! target)
8308 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8310 start_sequence ();
8312 /* Move the real (op0) and imaginary (op1) parts to their location. */
8313 emit_move_insn (gen_realpart (mode, target), op0);
8314 emit_move_insn (gen_imagpart (mode, target), op1);
8316 insns = get_insns ();
8317 end_sequence ();
8319 /* Complex construction should appear as a single unit. */
8320 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8321 each with a separate pseudo as destination.
8322 It's not correct for flow to treat them as a unit. */
8323 if (GET_CODE (target) != CONCAT)
8324 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8325 else
8326 emit_insns (insns);
8328 return target;
8331 case REALPART_EXPR:
8332 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8333 return gen_realpart (mode, op0);
8335 case IMAGPART_EXPR:
8336 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8337 return gen_imagpart (mode, op0);
8339 case CONJ_EXPR:
8341 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8342 rtx imag_t;
8343 rtx insns;
8345 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8347 if (! target)
8348 target = gen_reg_rtx (mode);
8350 start_sequence ();
8352 /* Store the realpart and the negated imagpart to target. */
8353 emit_move_insn (gen_realpart (partmode, target),
8354 gen_realpart (partmode, op0));
8356 imag_t = gen_imagpart (partmode, target);
8357 temp = expand_unop (partmode, neg_optab,
8358 gen_imagpart (partmode, op0), imag_t, 0);
8359 if (temp != imag_t)
8360 emit_move_insn (imag_t, temp);
8362 insns = get_insns ();
8363 end_sequence ();
8365 /* Conjugate should appear as a single unit.
8366 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8367 each with a separate pseudo as destination.
8368 It's not correct for flow to treat them as a unit. */
8369 if (GET_CODE (target) != CONCAT)
8370 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8371 else
8372 emit_insns (insns);
8374 return target;
8377 case TRY_CATCH_EXPR:
8379 tree handler = TREE_OPERAND (exp, 1);
8381 expand_eh_region_start ();
8383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8385 expand_eh_region_end (handler);
8387 return op0;
8390 case TRY_FINALLY_EXPR:
8392 tree try_block = TREE_OPERAND (exp, 0);
8393 tree finally_block = TREE_OPERAND (exp, 1);
8394 rtx finally_label = gen_label_rtx ();
8395 rtx done_label = gen_label_rtx ();
8396 rtx return_link = gen_reg_rtx (Pmode);
8397 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8398 (tree) finally_label, (tree) return_link);
8399 TREE_SIDE_EFFECTS (cleanup) = 1;
8401 /* Start a new binding layer that will keep track of all cleanup
8402 actions to be performed. */
8403 expand_start_bindings (2);
8405 target_temp_slot_level = temp_slot_level;
8407 expand_decl_cleanup (NULL_TREE, cleanup);
8408 op0 = expand_expr (try_block, target, tmode, modifier);
8410 preserve_temp_slots (op0);
8411 expand_end_bindings (NULL_TREE, 0, 0);
8412 emit_jump (done_label);
8413 emit_label (finally_label);
8414 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8415 emit_indirect_jump (return_link);
8416 emit_label (done_label);
8417 return op0;
8420 case GOTO_SUBROUTINE_EXPR:
8422 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8423 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8424 rtx return_address = gen_label_rtx ();
8425 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8426 emit_jump (subr);
8427 emit_label (return_address);
8428 return const0_rtx;
8431 case POPDCC_EXPR:
8433 rtx dcc = get_dynamic_cleanup_chain ();
8434 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8435 return const0_rtx;
8438 case POPDHC_EXPR:
8440 rtx dhc = get_dynamic_handler_chain ();
8441 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8442 return const0_rtx;
8445 case VA_ARG_EXPR:
8446 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8448 default:
8449 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8452 /* Here to do an ordinary binary operator, generating an instruction
8453 from the optab already placed in `this_optab'. */
8454 binop:
8455 preexpand_calls (exp);
8456 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8457 subtarget = 0;
8458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8459 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8460 binop2:
8461 temp = expand_binop (mode, this_optab, op0, op1, target,
8462 unsignedp, OPTAB_LIB_WIDEN);
8463 if (temp == 0)
8464 abort ();
8465 return temp;
8468 /* Similar to expand_expr, except that we don't specify a target, target
8469 mode, or modifier and we return the alignment of the inner type. This is
8470 used in cases where it is not necessary to align the result to the
8471 alignment of its type as long as we know the alignment of the result, for
8472 example for comparisons of BLKmode values. */
8474 static rtx
8475 expand_expr_unaligned (exp, palign)
8476 register tree exp;
8477 unsigned int *palign;
8479 register rtx op0;
8480 tree type = TREE_TYPE (exp);
8481 register enum machine_mode mode = TYPE_MODE (type);
8483 /* Default the alignment we return to that of the type. */
8484 *palign = TYPE_ALIGN (type);
8486 /* The only case in which we do anything special is if the resulting mode
8487 is BLKmode. */
8488 if (mode != BLKmode)
8489 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8491 switch (TREE_CODE (exp))
8493 case CONVERT_EXPR:
8494 case NOP_EXPR:
8495 case NON_LVALUE_EXPR:
8496 /* Conversions between BLKmode values don't change the underlying
8497 alignment or value. */
8498 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8499 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8500 break;
8502 case ARRAY_REF:
8503 /* Much of the code for this case is copied directly from expand_expr.
8504 We need to duplicate it here because we will do something different
8505 in the fall-through case, so we need to handle the same exceptions
8506 it does. */
8508 tree array = TREE_OPERAND (exp, 0);
8509 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8510 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8511 tree index = TREE_OPERAND (exp, 1);
8512 tree index_type = TREE_TYPE (index);
8513 HOST_WIDE_INT i;
8515 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8516 abort ();
8518 /* Optimize the special-case of a zero lower bound.
8520 We convert the low_bound to sizetype to avoid some problems
8521 with constant folding. (E.g. suppose the lower bound is 1,
8522 and its mode is QI. Without the conversion, (ARRAY
8523 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8524 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8526 But sizetype isn't quite right either (especially if
8527 the lowbound is negative). FIXME */
8529 if (! integer_zerop (low_bound))
8530 index = fold (build (MINUS_EXPR, index_type, index,
8531 convert (sizetype, low_bound)));
8533 /* If this is a constant index into a constant array,
8534 just get the value from the array. Handle both the cases when
8535 we have an explicit constructor and when our operand is a variable
8536 that was declared const. */
8538 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8540 if (TREE_CODE (index) == INTEGER_CST
8541 && TREE_INT_CST_HIGH (index) == 0)
8543 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8545 i = TREE_INT_CST_LOW (index);
8546 while (elem && i--)
8547 elem = TREE_CHAIN (elem);
8548 if (elem)
8549 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8550 palign);
8554 else if (optimize >= 1
8555 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8556 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8557 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8559 if (TREE_CODE (index) == INTEGER_CST)
8561 tree init = DECL_INITIAL (array);
8563 i = TREE_INT_CST_LOW (index);
8564 if (TREE_CODE (init) == CONSTRUCTOR)
8566 tree elem = CONSTRUCTOR_ELTS (init);
8568 while (elem
8569 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8570 elem = TREE_CHAIN (elem);
8571 if (elem)
8572 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8573 palign);
8579 /* ... fall through ... */
8581 case COMPONENT_REF:
8582 case BIT_FIELD_REF:
8583 /* If the operand is a CONSTRUCTOR, we can just extract the
8584 appropriate field if it is present. Don't do this if we have
8585 already written the data since we want to refer to that copy
8586 and varasm.c assumes that's what we'll do. */
8587 if (TREE_CODE (exp) != ARRAY_REF
8588 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8589 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8591 tree elt;
8593 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8594 elt = TREE_CHAIN (elt))
8595 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8596 /* Note that unlike the case in expand_expr, we know this is
8597 BLKmode and hence not an integer. */
8598 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8602 enum machine_mode mode1;
8603 int bitsize;
8604 int bitpos;
8605 tree offset;
8606 int volatilep = 0;
8607 unsigned int alignment;
8608 int unsignedp;
8609 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8610 &mode1, &unsignedp, &volatilep,
8611 &alignment);
8613 /* If we got back the original object, something is wrong. Perhaps
8614 we are evaluating an expression too early. In any event, don't
8615 infinitely recurse. */
8616 if (tem == exp)
8617 abort ();
8619 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8621 /* If this is a constant, put it into a register if it is a
8622 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8623 if (CONSTANT_P (op0))
8625 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8627 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8628 && offset == 0)
8629 op0 = force_reg (inner_mode, op0);
8630 else
8631 op0 = validize_mem (force_const_mem (inner_mode, op0));
8634 if (offset != 0)
8636 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8638 /* If this object is in a register, put it into memory.
8639 This case can't occur in C, but can in Ada if we have
8640 unchecked conversion of an expression from a scalar type to
8641 an array or record type. */
8642 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8643 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8645 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8647 mark_temp_addr_taken (memloc);
8648 emit_move_insn (memloc, op0);
8649 op0 = memloc;
8652 if (GET_CODE (op0) != MEM)
8653 abort ();
8655 if (GET_MODE (offset_rtx) != ptr_mode)
8657 #ifdef POINTERS_EXTEND_UNSIGNED
8658 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8659 #else
8660 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8661 #endif
8664 op0 = change_address (op0, VOIDmode,
8665 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8666 force_reg (ptr_mode,
8667 offset_rtx)));
8670 /* Don't forget about volatility even if this is a bitfield. */
8671 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8673 op0 = copy_rtx (op0);
8674 MEM_VOLATILE_P (op0) = 1;
8677 /* Check the access. */
8678 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8680 rtx to;
8681 int size;
8683 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8684 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8686 /* Check the access right of the pointer. */
8687 if (size > BITS_PER_UNIT)
8688 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8689 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8690 TYPE_MODE (sizetype),
8691 GEN_INT (MEMORY_USE_RO),
8692 TYPE_MODE (integer_type_node));
8695 /* In cases where an aligned union has an unaligned object
8696 as a field, we might be extracting a BLKmode value from
8697 an integer-mode (e.g., SImode) object. Handle this case
8698 by doing the extract into an object as wide as the field
8699 (which we know to be the width of a basic mode), then
8700 storing into memory, and changing the mode to BLKmode.
8701 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8702 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8703 if (mode1 == VOIDmode
8704 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8705 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8706 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8707 || bitpos % TYPE_ALIGN (type) != 0)))
8709 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8711 if (ext_mode == BLKmode)
8713 /* In this case, BITPOS must start at a byte boundary. */
8714 if (GET_CODE (op0) != MEM
8715 || bitpos % BITS_PER_UNIT != 0)
8716 abort ();
8718 op0 = change_address (op0, VOIDmode,
8719 plus_constant (XEXP (op0, 0),
8720 bitpos / BITS_PER_UNIT));
8722 else
8724 rtx new = assign_stack_temp (ext_mode,
8725 bitsize / BITS_PER_UNIT, 0);
8727 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8728 unsignedp, NULL_RTX, ext_mode,
8729 ext_mode, alignment,
8730 int_size_in_bytes (TREE_TYPE (tem)));
8732 /* If the result is a record type and BITSIZE is narrower than
8733 the mode of OP0, an integral mode, and this is a big endian
8734 machine, we must put the field into the high-order bits. */
8735 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8736 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8737 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8738 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8739 size_int (GET_MODE_BITSIZE
8740 (GET_MODE (op0))
8741 - bitsize),
8742 op0, 1);
8745 emit_move_insn (new, op0);
8746 op0 = copy_rtx (new);
8747 PUT_MODE (op0, BLKmode);
8750 else
8751 /* Get a reference to just this component. */
8752 op0 = change_address (op0, mode1,
8753 plus_constant (XEXP (op0, 0),
8754 (bitpos / BITS_PER_UNIT)));
8756 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8758 /* Adjust the alignment in case the bit position is not
8759 a multiple of the alignment of the inner object. */
8760 while (bitpos % alignment != 0)
8761 alignment >>= 1;
8763 if (GET_CODE (XEXP (op0, 0)) == REG)
8764 mark_reg_pointer (XEXP (op0, 0), alignment);
8766 MEM_IN_STRUCT_P (op0) = 1;
8767 MEM_VOLATILE_P (op0) |= volatilep;
8769 *palign = alignment;
8770 return op0;
8773 default:
8774 break;
8778 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8781 /* Return the tree node and offset if a given argument corresponds to
8782 a string constant. */
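/* For example, given the argument `"hello" + 3' this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to 3; for a plain `"hello"'
   the offset is integer_zero_node. */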
8784 tree
8785 string_constant (arg, ptr_offset)
8786 tree arg;
8787 tree *ptr_offset;
8789 STRIP_NOPS (arg);
8791 if (TREE_CODE (arg) == ADDR_EXPR
8792 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8794 *ptr_offset = integer_zero_node;
8795 return TREE_OPERAND (arg, 0);
8797 else if (TREE_CODE (arg) == PLUS_EXPR)
8799 tree arg0 = TREE_OPERAND (arg, 0);
8800 tree arg1 = TREE_OPERAND (arg, 1);
8802 STRIP_NOPS (arg0);
8803 STRIP_NOPS (arg1);
8805 if (TREE_CODE (arg0) == ADDR_EXPR
8806 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8808 *ptr_offset = arg1;
8809 return TREE_OPERAND (arg0, 0);
8811 else if (TREE_CODE (arg1) == ADDR_EXPR
8812 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8814 *ptr_offset = arg0;
8815 return TREE_OPERAND (arg1, 0);
8819 return 0;
8822 /* Expand code for a post- or pre- increment or decrement
8823 and return the RTX for the result.
8824 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8826 static rtx
8827 expand_increment (exp, post, ignore)
8828 register tree exp;
8829 int post, ignore;
8831 register rtx op0, op1;
8832 register rtx temp, value;
8833 register tree incremented = TREE_OPERAND (exp, 0);
8834 optab this_optab = add_optab;
8835 int icode;
8836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8837 int op0_is_copy = 0;
8838 int single_insn = 0;
8839 /* 1 means we can't store into OP0 directly,
8840 because it is a subreg narrower than a word,
8841 and we don't dare clobber the rest of the word. */
8842 int bad_subreg = 0;
8844 /* Stabilize any component ref that might need to be
8845 evaluated more than once below. */
8846 if (!post
8847 || TREE_CODE (incremented) == BIT_FIELD_REF
8848 || (TREE_CODE (incremented) == COMPONENT_REF
8849 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8850 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8851 incremented = stabilize_reference (incremented);
8852 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8853 ones into save exprs so that they don't accidentally get evaluated
8854 more than once by the code below. */
8855 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8856 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8857 incremented = save_expr (incremented);
8859 /* Compute the operands as RTX.
8860 Note whether OP0 is the actual lvalue or a copy of it:
8861 I believe it is a copy iff it is a register or subreg
8862 and insns were generated in computing it. */
8864 temp = get_last_insn ();
8865 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8867 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8868 in place but instead must do sign- or zero-extension during assignment,
8869 so we copy it into a new register and let the code below use it as
8870 a copy.
8872 Note that we can safely modify this SUBREG since it is known
8873 not to be shared (it was made by the expand_expr call above). */
8875 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8877 if (post)
8878 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8879 else
8880 bad_subreg = 1;
8882 else if (GET_CODE (op0) == SUBREG
8883 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8885 /* We cannot increment this SUBREG in place. If we are
8886 post-incrementing, get a copy of the old value. Otherwise,
8887 just mark that we cannot increment in place. */
8888 if (post)
8889 op0 = copy_to_reg (op0);
8890 else
8891 bad_subreg = 1;
8894 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8895 && temp != get_last_insn ());
8896 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8897 EXPAND_MEMORY_USE_BAD);
8899 /* Decide whether incrementing or decrementing. */
8900 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8901 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8902 this_optab = sub_optab;
8904 /* Convert decrement by a constant into a negative increment. */
8905 if (this_optab == sub_optab
8906 && GET_CODE (op1) == CONST_INT)
8908 op1 = GEN_INT (- INTVAL (op1));
8909 this_optab = add_optab;
8912 /* For a preincrement, see if we can do this with a single instruction. */
8913 if (!post)
8915 icode = (int) this_optab->handlers[(int) mode].insn_code;
8916 if (icode != (int) CODE_FOR_nothing
8917 /* Make sure that OP0 is valid for operands 0 and 1
8918 of the insn we want to queue. */
8919 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8920 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8921 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8922 single_insn = 1;
8925 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8926 then we cannot just increment OP0. We must therefore contrive to
8927 increment the original value. Then, for postincrement, we can return
8928 OP0 since it is a copy of the old value. For preincrement, expand here
8929 unless we can do it with a single insn.
8931 Likewise if storing directly into OP0 would clobber high bits
8932 we need to preserve (bad_subreg). */
8933 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8935 /* This is the easiest way to increment the value wherever it is.
8936 Problems with multiple evaluation of INCREMENTED are prevented
8937 because either (1) it is a component_ref or preincrement,
8938 in which case it was stabilized above, or (2) it is an array_ref
8939 with constant index in an array in a register, which is
8940 safe to reevaluate. */
8941 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8942 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8943 ? MINUS_EXPR : PLUS_EXPR),
8944 TREE_TYPE (exp),
8945 incremented,
8946 TREE_OPERAND (exp, 1));
8948 while (TREE_CODE (incremented) == NOP_EXPR
8949 || TREE_CODE (incremented) == CONVERT_EXPR)
8951 newexp = convert (TREE_TYPE (incremented), newexp);
8952 incremented = TREE_OPERAND (incremented, 0);
8955 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8956 return post ? op0 : temp;
8959 if (post)
8961 /* We have a true reference to the value in OP0.
8962 If there is an insn to add or subtract in this mode, queue it.
8963 Queueing the increment insn avoids the register shuffling
8964 that often results if we must increment now and first save
8965 the old value for subsequent use. */
8967 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8968 op0 = stabilize (op0);
8969 #endif
8971 icode = (int) this_optab->handlers[(int) mode].insn_code;
8972 if (icode != (int) CODE_FOR_nothing
8973 /* Make sure that OP0 is valid for operands 0 and 1
8974 of the insn we want to queue. */
8975 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8976 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8978 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8979 op1 = force_reg (mode, op1);
8981 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8983 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8985 rtx addr = (general_operand (XEXP (op0, 0), mode)
8986 ? force_reg (Pmode, XEXP (op0, 0))
8987 : copy_to_reg (XEXP (op0, 0)));
8988 rtx temp, result;
8990 op0 = change_address (op0, VOIDmode, addr);
8991 temp = force_reg (GET_MODE (op0), op0);
8992 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8993 op1 = force_reg (mode, op1);
8995 /* The increment queue is LIFO, thus we have to `queue'
8996 the instructions in reverse order. */
8997 enqueue_insn (op0, gen_move_insn (op0, temp));
8998 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8999 return result;
9003 /* Preincrement, or we can't increment with one simple insn. */
9004 if (post)
9005 /* Save a copy of the value before inc or dec, to return it later. */
9006 temp = value = copy_to_reg (op0);
9007 else
9008 /* Arrange to return the incremented value. */
9009 /* Copy the rtx because expand_binop will protect it from the queue,
9010 and the rtx that protection yields would be invalid for us to return
9011 if our caller does emit_queue before using our result. */
9012 temp = copy_rtx (value = op0);
9014 /* Increment however we can. */
9015 op1 = expand_binop (mode, this_optab, value, op1,
9016 current_function_check_memory_usage ? NULL_RTX : op0,
9017 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9018 /* Make sure the value is stored into OP0. */
9019 if (op1 != op0)
9020 emit_move_insn (op0, op1);
9022 return temp;
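/* A minimal usage sketch of the routine above (I_DECL is a hypothetical
   VAR_DECL tree): expanding a POSTINCREMENT_EXPR yields an rtx holding
   the old value, while the store back into the lvalue is emitted now or
   left on the queue.

       tree inc = build (POSTINCREMENT_EXPR, integer_type_node,
                         i_decl, integer_one_node);
       rtx old_value = expand_expr (inc, NULL_RTX, VOIDmode, 0);
       emit_queue ();

   OLD_VALUE may then be used as the value of the expression; emit_queue
   flushes any increment insn that was deferred.  */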
9025 /* Expand all function calls contained within EXP, innermost ones first.
9026 But don't look within expressions that have sequence points.
9027 For each CALL_EXPR, record the rtx for its value
9028 in the CALL_EXPR_RTL field. */
9030 static void
9031 preexpand_calls (exp)
9032 tree exp;
9034 register int nops, i;
9035 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9037 if (! do_preexpand_calls)
9038 return;
9040 /* Only expressions and references can contain calls. */
9042 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9043 return;
9045 switch (TREE_CODE (exp))
9047 case CALL_EXPR:
9048 /* Do nothing if already expanded. */
9049 if (CALL_EXPR_RTL (exp) != 0
9050 /* Do nothing if the call returns a variable-sized object. */
9051 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9052 /* Do nothing to built-in functions. */
9053 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9054 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9055 == FUNCTION_DECL)
9056 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9057 return;
9059 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9060 return;
9062 case COMPOUND_EXPR:
9063 case COND_EXPR:
9064 case TRUTH_ANDIF_EXPR:
9065 case TRUTH_ORIF_EXPR:
9066 /* If we find one of these, then we can be sure
9067 the stack adjustment will be done for it (since it makes jumps).
9068 Do it now, so that if this is inside an argument
9069 of a function, we don't get the stack adjustment
9070 after some other args have already been pushed. */
9071 do_pending_stack_adjust ();
9072 return;
9074 case BLOCK:
9075 case RTL_EXPR:
9076 case WITH_CLEANUP_EXPR:
9077 case CLEANUP_POINT_EXPR:
9078 case TRY_CATCH_EXPR:
9079 return;
9081 case SAVE_EXPR:
9082 if (SAVE_EXPR_RTL (exp) != 0)
9083 return;
9085 default:
9086 break;
9089 nops = tree_code_length[(int) TREE_CODE (exp)];
9090 for (i = 0; i < nops; i++)
9091 if (TREE_OPERAND (exp, i) != 0)
9093 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9094 /* We don't need to preexpand the cleanup for a TARGET_EXPR;
9095 the cleanup does not run before the call is made. */
9097 else
9099 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9100 if (type == 'e' || type == '<' || type == '1' || type == '2'
9101 || type == 'r')
9102 preexpand_calls (TREE_OPERAND (exp, i));
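/* A sketch of the effect of preexpand_calls (F and G are hypothetical
   functions): for a nested call such as f (g (x)), the inner CALL_EXPR
   is expanded first and its value recorded,

       preexpand_calls (call_to_f);
       ... CALL_EXPR_RTL (call_to_g) now holds the rtx for g's value ...

   so that when the arguments of F are expanded later, the recorded rtx
   is reused instead of expanding the call to G a second time.  */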
9107 /* At the start of a function, record that we have no previously-pushed
9108 arguments waiting to be popped. */
9110 void
9111 init_pending_stack_adjust ()
9113 pending_stack_adjust = 0;
9116 /* When exiting from a function, if safe, clear out any pending stack adjust
9117 so the adjustment won't get done.
9119 Note, if the current function calls alloca, then it must have a
9120 frame pointer regardless of the value of flag_omit_frame_pointer. */
9122 void
9123 clear_pending_stack_adjust ()
9125 #ifdef EXIT_IGNORE_STACK
9126 if (optimize > 0
9127 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9128 && EXIT_IGNORE_STACK
9129 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9130 && ! flag_inline_functions)
9131 pending_stack_adjust = 0;
9132 #endif
9135 /* Pop any previously-pushed arguments that have not been popped yet. */
9137 void
9138 do_pending_stack_adjust ()
9140 if (inhibit_defer_pop == 0)
9142 if (pending_stack_adjust != 0)
9143 adjust_stack (GEN_INT (pending_stack_adjust));
9144 pending_stack_adjust = 0;
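/* The usual pattern for the three routines above, as used later in this
   file: argument pops after calls accumulate in pending_stack_adjust and
   are flushed only where the stack depth becomes observable, e.g.

       do_pending_stack_adjust ();
       emit_label (label);

   so that several consecutive calls can share a single adjust_stack insn
   (a sketch; LABEL is any CODE_LABEL about to be emitted).  */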
9148 /* Expand conditional expressions. */
9150 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9151 LABEL is an rtx of code CODE_LABEL, in this function and in all
9152 the functions that follow. */
9154 void
9155 jumpifnot (exp, label)
9156 tree exp;
9157 rtx label;
9159 do_jump (exp, label, NULL_RTX);
9162 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9164 void
9165 jumpif (exp, label)
9166 tree exp;
9167 rtx label;
9169 do_jump (exp, NULL_RTX, label);
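/* A minimal sketch of how jumpif and jumpifnot are used to expand an
   if-statement (COND is a hypothetical tree for the condition):

       rtx else_label = gen_label_rtx ();
       jumpifnot (cond, else_label);
       ... expand the then-clause ...
       emit_label (else_label);

   jumpif is the mirror image, branching when COND is nonzero.  */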
9172 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9173 the result is zero, or IF_TRUE_LABEL if the result is one.
9174 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9175 meaning fall through in that case.
9177 do_jump always does any pending stack adjust except when it does not
9178 actually perform a jump. An example where there is no jump
9179 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9181 This function is responsible for optimizing cases such as
9182 &&, || and comparison operators in EXP. */
9184 void
9185 do_jump (exp, if_false_label, if_true_label)
9186 tree exp;
9187 rtx if_false_label, if_true_label;
9189 register enum tree_code code = TREE_CODE (exp);
9190 /* Some cases need to create a label to jump to
9191 in order to properly fall through.
9192 These cases set DROP_THROUGH_LABEL nonzero. */
9193 rtx drop_through_label = 0;
9194 rtx temp;
9195 int i;
9196 tree type;
9197 enum machine_mode mode;
9199 #ifdef MAX_INTEGER_COMPUTATION_MODE
9200 check_max_integer_computation_mode (exp);
9201 #endif
9203 emit_queue ();
9205 switch (code)
9207 case ERROR_MARK:
9208 break;
9210 case INTEGER_CST:
9211 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9212 if (temp)
9213 emit_jump (temp);
9214 break;
9216 #if 0
9217 /* This is not true with #pragma weak */
9218 case ADDR_EXPR:
9219 /* The address of something can never be zero. */
9220 if (if_true_label)
9221 emit_jump (if_true_label);
9222 break;
9223 #endif
9225 case NOP_EXPR:
9226 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9227 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9228 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9229 goto normal;
9230 case CONVERT_EXPR:
9231 /* If we are narrowing the operand, we have to do the compare in the
9232 narrower mode. */
9233 if ((TYPE_PRECISION (TREE_TYPE (exp))
9234 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9235 goto normal;
9236 case NON_LVALUE_EXPR:
9237 case REFERENCE_EXPR:
9238 case ABS_EXPR:
9239 case NEGATE_EXPR:
9240 case LROTATE_EXPR:
9241 case RROTATE_EXPR:
9242 /* These cannot change zero->non-zero or vice versa. */
9243 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9244 break;
9246 case WITH_RECORD_EXPR:
9247 /* Put the object on the placeholder list, recurse through our first
9248 operand, and pop the list. */
9249 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9250 placeholder_list);
9251 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9252 placeholder_list = TREE_CHAIN (placeholder_list);
9253 break;
9255 #if 0
9256 /* This never takes fewer insns than evaluating the PLUS_EXPR followed
9257 by a test, and can take more if the test is eliminated. */
9258 case PLUS_EXPR:
9259 /* Reduce to minus. */
9260 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9261 TREE_OPERAND (exp, 0),
9262 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9263 TREE_OPERAND (exp, 1))));
9264 /* Process as MINUS. */
9265 #endif
9267 case MINUS_EXPR:
9268 /* Non-zero iff operands of minus differ. */
9269 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9270 TREE_OPERAND (exp, 0),
9271 TREE_OPERAND (exp, 1)),
9272 NE, NE, if_false_label, if_true_label);
9273 break;
9275 case BIT_AND_EXPR:
9276 /* If we are AND'ing with a small constant, do this comparison in the
9277 smallest type that fits. If the machine doesn't have comparisons
9278 that small, it will be converted back to the wider comparison.
9279 This helps if we are testing the sign bit of a narrower object.
9280 combine can't do this for us because it can't know whether a
9281 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9283 if (! SLOW_BYTE_ACCESS
9284 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9285 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9286 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9287 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9288 && (type = type_for_mode (mode, 1)) != 0
9289 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9290 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9291 != CODE_FOR_nothing))
9293 do_jump (convert (type, exp), if_false_label, if_true_label);
9294 break;
9296 goto normal;
9298 case TRUTH_NOT_EXPR:
9299 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9300 break;
9302 case TRUTH_ANDIF_EXPR:
9303 if (if_false_label == 0)
9304 if_false_label = drop_through_label = gen_label_rtx ();
9305 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9306 start_cleanup_deferral ();
9307 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9308 end_cleanup_deferral ();
9309 break;
9311 case TRUTH_ORIF_EXPR:
9312 if (if_true_label == 0)
9313 if_true_label = drop_through_label = gen_label_rtx ();
9314 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9315 start_cleanup_deferral ();
9316 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9317 end_cleanup_deferral ();
9318 break;
9320 case COMPOUND_EXPR:
9321 push_temp_slots ();
9322 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9323 preserve_temp_slots (NULL_RTX);
9324 free_temp_slots ();
9325 pop_temp_slots ();
9326 emit_queue ();
9327 do_pending_stack_adjust ();
9328 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9329 break;
9331 case COMPONENT_REF:
9332 case BIT_FIELD_REF:
9333 case ARRAY_REF:
9335 int bitsize, bitpos, unsignedp;
9336 enum machine_mode mode;
9337 tree type;
9338 tree offset;
9339 int volatilep = 0;
9340 unsigned int alignment;
9342 /* Get description of this reference. We don't actually care
9343 about the underlying object here. */
9344 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9345 &mode, &unsignedp, &volatilep,
9346 &alignment);
9348 type = type_for_size (bitsize, unsignedp);
9349 if (! SLOW_BYTE_ACCESS
9350 && type != 0 && bitsize >= 0
9351 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9352 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9353 != CODE_FOR_nothing))
9355 do_jump (convert (type, exp), if_false_label, if_true_label);
9356 break;
9358 goto normal;
9361 case COND_EXPR:
9362 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9363 if (integer_onep (TREE_OPERAND (exp, 1))
9364 && integer_zerop (TREE_OPERAND (exp, 2)))
9365 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9367 else if (integer_zerop (TREE_OPERAND (exp, 1))
9368 && integer_onep (TREE_OPERAND (exp, 2)))
9369 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9371 else
9373 register rtx label1 = gen_label_rtx ();
9374 drop_through_label = gen_label_rtx ();
9376 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9378 start_cleanup_deferral ();
9379 /* Now the THEN-expression. */
9380 do_jump (TREE_OPERAND (exp, 1),
9381 if_false_label ? if_false_label : drop_through_label,
9382 if_true_label ? if_true_label : drop_through_label);
9383 /* In case the do_jump just above never jumps. */
9384 do_pending_stack_adjust ();
9385 emit_label (label1);
9387 /* Now the ELSE-expression. */
9388 do_jump (TREE_OPERAND (exp, 2),
9389 if_false_label ? if_false_label : drop_through_label,
9390 if_true_label ? if_true_label : drop_through_label);
9391 end_cleanup_deferral ();
9393 break;
9395 case EQ_EXPR:
9397 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9399 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9400 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9402 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9403 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9404 do_jump
9405 (fold
9406 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9407 fold (build (EQ_EXPR, TREE_TYPE (exp),
9408 fold (build1 (REALPART_EXPR,
9409 TREE_TYPE (inner_type),
9410 exp0)),
9411 fold (build1 (REALPART_EXPR,
9412 TREE_TYPE (inner_type),
9413 exp1)))),
9414 fold (build (EQ_EXPR, TREE_TYPE (exp),
9415 fold (build1 (IMAGPART_EXPR,
9416 TREE_TYPE (inner_type),
9417 exp0)),
9418 fold (build1 (IMAGPART_EXPR,
9419 TREE_TYPE (inner_type),
9420 exp1)))))),
9421 if_false_label, if_true_label);
9424 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9425 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9427 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9428 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9429 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9430 else
9431 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9432 break;
9435 case NE_EXPR:
9437 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9439 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9440 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9442 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9443 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9444 do_jump
9445 (fold
9446 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9447 fold (build (NE_EXPR, TREE_TYPE (exp),
9448 fold (build1 (REALPART_EXPR,
9449 TREE_TYPE (inner_type),
9450 exp0)),
9451 fold (build1 (REALPART_EXPR,
9452 TREE_TYPE (inner_type),
9453 exp1)))),
9454 fold (build (NE_EXPR, TREE_TYPE (exp),
9455 fold (build1 (IMAGPART_EXPR,
9456 TREE_TYPE (inner_type),
9457 exp0)),
9458 fold (build1 (IMAGPART_EXPR,
9459 TREE_TYPE (inner_type),
9460 exp1)))))),
9461 if_false_label, if_true_label);
9464 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9465 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9467 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9468 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9469 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9470 else
9471 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9472 break;
9475 case LT_EXPR:
9476 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9477 if (GET_MODE_CLASS (mode) == MODE_INT
9478 && ! can_compare_p (LT, mode, ccp_jump))
9479 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9480 else
9481 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9482 break;
9484 case LE_EXPR:
9485 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9486 if (GET_MODE_CLASS (mode) == MODE_INT
9487 && ! can_compare_p (LE, mode, ccp_jump))
9488 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9489 else
9490 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9491 break;
9493 case GT_EXPR:
9494 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9495 if (GET_MODE_CLASS (mode) == MODE_INT
9496 && ! can_compare_p (GT, mode, ccp_jump))
9497 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9498 else
9499 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9500 break;
9502 case GE_EXPR:
9503 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9504 if (GET_MODE_CLASS (mode) == MODE_INT
9505 && ! can_compare_p (GE, mode, ccp_jump))
9506 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9507 else
9508 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9509 break;
9511 case UNORDERED_EXPR:
9512 case ORDERED_EXPR:
9514 enum rtx_code cmp, rcmp;
9515 int do_rev;
9517 if (code == UNORDERED_EXPR)
9518 cmp = UNORDERED, rcmp = ORDERED;
9519 else
9520 cmp = ORDERED, rcmp = UNORDERED;
9521 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9523 do_rev = 0;
9524 if (! can_compare_p (cmp, mode, ccp_jump)
9525 && (can_compare_p (rcmp, mode, ccp_jump)
9526 /* If the target doesn't provide either UNORDERED or ORDERED
9527 comparisons, canonicalize on UNORDERED for the library. */
9528 || rcmp == UNORDERED))
9529 do_rev = 1;
9531 if (! do_rev)
9532 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9533 else
9534 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9536 break;
9539 enum rtx_code rcode1;
9540 enum tree_code tcode2;
9542 case UNLT_EXPR:
9543 rcode1 = UNLT;
9544 tcode2 = LT_EXPR;
9545 goto unordered_bcc;
9546 case UNLE_EXPR:
9547 rcode1 = UNLE;
9548 tcode2 = LE_EXPR;
9549 goto unordered_bcc;
9550 case UNGT_EXPR:
9551 rcode1 = UNGT;
9552 tcode2 = GT_EXPR;
9553 goto unordered_bcc;
9554 case UNGE_EXPR:
9555 rcode1 = UNGE;
9556 tcode2 = GE_EXPR;
9557 goto unordered_bcc;
9558 case UNEQ_EXPR:
9559 rcode1 = UNEQ;
9560 tcode2 = EQ_EXPR;
9561 goto unordered_bcc;
9563 unordered_bcc:
9564 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9565 if (can_compare_p (rcode1, mode, ccp_jump))
9566 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9567 if_true_label);
9568 else
9570 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9571 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9572 tree cmp0, cmp1;
9574 /* If the target doesn't support combined unordered
9575 compares, decompose into UNORDERED + comparison. */
9576 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9577 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9578 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9579 do_jump (exp, if_false_label, if_true_label);
9582 break;
9584 default:
9585 normal:
9586 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9587 #if 0
9588 /* This is not needed any more, and it produces poor code since it causes
9589 comparisons and tests from non-SI objects to have different code
9590 sequences. */
9591 /* Copy to register to avoid generating bad insns by cse
9592 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9593 if (!cse_not_expected && GET_CODE (temp) == MEM)
9594 temp = copy_to_reg (temp);
9595 #endif
9596 do_pending_stack_adjust ();
9597 /* Do any postincrements in the expression that was tested. */
9598 emit_queue ();
9600 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9602 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9603 if (target)
9604 emit_jump (target);
9606 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9607 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9608 /* Note swapping the labels gives us not-equal. */
9609 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9610 else if (GET_MODE (temp) != VOIDmode)
9611 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9612 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9613 GET_MODE (temp), NULL_RTX, 0,
9614 if_false_label, if_true_label);
9615 else
9616 abort ();
9619 if (drop_through_label)
9621 /* If do_jump produces code that might be jumped around,
9622 do any stack adjusts from that code, before the place
9623 where control merges in. */
9624 do_pending_stack_adjust ();
9625 emit_label (drop_through_label);
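/* A sketch of the label convention described above (COND is a
   hypothetical tree):

       rtx true_label = gen_label_rtx ();
       do_jump (cond, NULL_RTX, true_label);
       ... reached only when COND is zero ...
       emit_label (true_label);

   Passing NULL_RTX for a label means fall through in that case; this is
   exactly how jumpif and jumpifnot above are implemented.  */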
9629 /* Given a comparison expression EXP for values too wide to be compared
9630 with one insn, test the comparison and jump to the appropriate label.
9631 The code of EXP is ignored; we always test GT if SWAP is 0,
9632 and LT if SWAP is 1. */
9634 static void
9635 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9636 tree exp;
9637 int swap;
9638 rtx if_false_label, if_true_label;
9640 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9641 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9642 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9643 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9645 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9648 /* Compare OP0 with OP1, a word at a time, in mode MODE.
9649 UNSIGNEDP says to do unsigned comparison.
9650 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9652 void
9653 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9654 enum machine_mode mode;
9655 int unsignedp;
9656 rtx op0, op1;
9657 rtx if_false_label, if_true_label;
9659 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9660 rtx drop_through_label = 0;
9661 int i;
9663 if (! if_true_label || ! if_false_label)
9664 drop_through_label = gen_label_rtx ();
9665 if (! if_true_label)
9666 if_true_label = drop_through_label;
9667 if (! if_false_label)
9668 if_false_label = drop_through_label;
9670 /* Compare a word at a time, high order first. */
9671 for (i = 0; i < nwords; i++)
9673 rtx op0_word, op1_word;
9675 if (WORDS_BIG_ENDIAN)
9677 op0_word = operand_subword_force (op0, i, mode);
9678 op1_word = operand_subword_force (op1, i, mode);
9680 else
9682 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9683 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9686 /* All but the high-order word must be compared as unsigned. */
9687 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9688 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9689 NULL_RTX, if_true_label);
9691 /* Consider lower words only if these are equal. */
9692 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9693 NULL_RTX, 0, NULL_RTX, if_false_label);
9696 if (if_false_label)
9697 emit_jump (if_false_label);
9698 if (drop_through_label)
9699 emit_label (drop_through_label);
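/* For example (a sketch; X and Y are DImode rtx's on a target whose
   word is 32 bits, so nwords == 2):

       do_jump_by_parts_greater_rtx (DImode, 0, x, y, NULL_RTX, win);

   emits a signed GT test on the high-order words that jumps to WIN, an
   NE test that skips to the drop-through point when the high words
   already differ, and finally an unsigned GT test on the low-order
   words.  */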
9702 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9703 with one insn, test the comparison and jump to the appropriate label. */
9705 static void
9706 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9707 tree exp;
9708 rtx if_false_label, if_true_label;
9710 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9711 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9712 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9713 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9714 int i;
9715 rtx drop_through_label = 0;
9717 if (! if_false_label)
9718 drop_through_label = if_false_label = gen_label_rtx ();
9720 for (i = 0; i < nwords; i++)
9721 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9722 operand_subword_force (op1, i, mode),
9723 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9724 word_mode, NULL_RTX, 0, if_false_label,
9725 NULL_RTX);
9727 if (if_true_label)
9728 emit_jump (if_true_label);
9729 if (drop_through_label)
9730 emit_label (drop_through_label);
9733 /* Jump according to whether OP0 is 0.
9734 We assume that OP0 has an integer mode that is too wide
9735 for the available compare insns. */
9737 void
9738 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9739 rtx op0;
9740 rtx if_false_label, if_true_label;
9742 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9743 rtx part;
9744 int i;
9745 rtx drop_through_label = 0;
9747 /* The fastest way of doing this comparison on almost any machine is to
9748 "or" all the words and compare the result. If all have to be loaded
9749 from memory and this is a very wide item, it's possible this may
9750 be slower, but that's highly unlikely. */
9752 part = gen_reg_rtx (word_mode);
9753 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9754 for (i = 1; i < nwords && part != 0; i++)
9755 part = expand_binop (word_mode, ior_optab, part,
9756 operand_subword_force (op0, i, GET_MODE (op0)),
9757 part, 1, OPTAB_WIDEN);
9759 if (part != 0)
9761 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9762 NULL_RTX, 0, if_false_label, if_true_label);
9764 return;
9767 /* If we couldn't do the "or" simply, do this with a series of compares. */
9768 if (! if_false_label)
9769 drop_through_label = if_false_label = gen_label_rtx ();
9771 for (i = 0; i < nwords; i++)
9772 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9773 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9774 if_false_label, NULL_RTX);
9776 if (if_true_label)
9777 emit_jump (if_true_label);
9779 if (drop_through_label)
9780 emit_label (drop_through_label);
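/* For example (a sketch; X is a DImode rtx on a 32-bit word target):

       do_jump_by_parts_equality_rtx (x, nonzero_label, NULL_RTX);

   IORs the two word-sized halves of X into one word_mode register and
   compares that single register against zero, jumping to NONZERO_LABEL
   when X is nonzero and falling through when it is zero.  */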
9783 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9784 and set (CC0) according to the result.
9786 The decision as to signed or unsigned comparison must be made by the caller.
9788 We force a stack adjustment unless there are currently
9789 things pushed on the stack that aren't yet used.
9791 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9792 compared.
9794 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9795 size of MODE should be used. */
9797 rtx
9798 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9799 register rtx op0, op1;
9800 enum rtx_code code;
9801 int unsignedp;
9802 enum machine_mode mode;
9803 rtx size;
9804 unsigned int align;
9806 rtx tem;
9808 /* If one operand is constant, make it the second one. Only do this
9809 if the other operand is not constant as well. */
9811 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9812 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9814 tem = op0;
9815 op0 = op1;
9816 op1 = tem;
9817 code = swap_condition (code);
9820 if (flag_force_mem)
9822 op0 = force_not_mem (op0);
9823 op1 = force_not_mem (op1);
9826 do_pending_stack_adjust ();
9828 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9829 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9830 return tem;
9832 #if 0
9833 /* There's no need to do this now that combine.c can eliminate lots of
9834 sign extensions. This can be less efficient in certain cases on other
9835 machines. */
9837 /* If this is a signed equality comparison, we can do it as an
9838 unsigned comparison since zero-extension is cheaper than sign
9839 extension and comparisons with zero are done as unsigned. This is
9840 the case even on machines that can do fast sign extension, since
9841 zero-extension is easier to combine with other operations than
9842 sign-extension is. If we are comparing against a constant, we must
9843 convert it to what it would look like unsigned. */
9844 if ((code == EQ || code == NE) && ! unsignedp
9845 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9847 if (GET_CODE (op1) == CONST_INT
9848 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9849 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9850 unsignedp = 1;
9852 #endif
9854 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9856 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
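/* A sketch of the usual calling pattern (also followed by do_store_flag
   below): the returned condition rtx is checked for having folded to a
   constant, and otherwise is turned into a conditional branch:

       rtx cond = compare_from_rtx (op0, op1, EQ, 0, SImode, NULL_RTX, 0);
       if (GET_CODE (cond) != CONST_INT)
         emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (label));

   where LABEL is a previously generated CODE_LABEL.  */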
9859 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9860 The decision as to signed or unsigned comparison must be made by the caller.
9862 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9863 compared.
9865 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9866 size of MODE should be used. */
9868 void
9869 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9870 if_false_label, if_true_label)
9871 register rtx op0, op1;
9872 enum rtx_code code;
9873 int unsignedp;
9874 enum machine_mode mode;
9875 rtx size;
9876 unsigned int align;
9877 rtx if_false_label, if_true_label;
9879 rtx tem;
9880 int dummy_true_label = 0;
9882 /* Reverse the comparison if that is safe and we want to jump if it is
9883 false. */
9884 if (! if_true_label && ! FLOAT_MODE_P (mode))
9886 if_true_label = if_false_label;
9887 if_false_label = 0;
9888 code = reverse_condition (code);
9891 /* If one operand is constant, make it the second one. Only do this
9892 if the other operand is not constant as well. */
9894 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9895 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9897 tem = op0;
9898 op0 = op1;
9899 op1 = tem;
9900 code = swap_condition (code);
9903 if (flag_force_mem)
9905 op0 = force_not_mem (op0);
9906 op1 = force_not_mem (op1);
9909 do_pending_stack_adjust ();
9911 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9912 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9914 if (tem == const_true_rtx)
9916 if (if_true_label)
9917 emit_jump (if_true_label);
9919 else
9921 if (if_false_label)
9922 emit_jump (if_false_label);
9924 return;
9927 #if 0
9928 /* There's no need to do this now that combine.c can eliminate lots of
9929 sign extensions. This can be less efficient in certain cases on other
9930 machines. */
9932 /* If this is a signed equality comparison, we can do it as an
9933 unsigned comparison since zero-extension is cheaper than sign
9934 extension and comparisons with zero are done as unsigned. This is
9935 the case even on machines that can do fast sign extension, since
9936 zero-extension is easier to combine with other operations than
9937 sign-extension is. If we are comparing against a constant, we must
9938 convert it to what it would look like unsigned. */
9939 if ((code == EQ || code == NE) && ! unsignedp
9940 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9942 if (GET_CODE (op1) == CONST_INT
9943 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9944 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9945 unsignedp = 1;
9947 #endif
9949 if (! if_true_label)
9951 dummy_true_label = 1;
9952 if_true_label = gen_label_rtx ();
9955 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9956 if_true_label);
9958 if (if_false_label)
9959 emit_jump (if_false_label);
9960 if (dummy_true_label)
9961 emit_label (if_true_label);
9964 /* Generate code for a comparison expression EXP (including code to compute
9965 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9966 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9967 generated code will drop through.
9968 SIGNED_CODE should be the rtx operation for this comparison for
9969 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9971 We force a stack adjustment unless there are currently
9972 things pushed on the stack that aren't yet used. */
9974 static void
9975 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9976 if_true_label)
9977 register tree exp;
9978 enum rtx_code signed_code, unsigned_code;
9979 rtx if_false_label, if_true_label;
9981 unsigned int align0, align1;
9982 register rtx op0, op1;
9983 register tree type;
9984 register enum machine_mode mode;
9985 int unsignedp;
9986 enum rtx_code code;
9988 /* Don't crash if the comparison was erroneous. */
9989 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9990 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9991 return;
9993 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9994 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9995 mode = TYPE_MODE (type);
9996 unsignedp = TREE_UNSIGNED (type);
9997 code = unsignedp ? unsigned_code : signed_code;
9999 #ifdef HAVE_canonicalize_funcptr_for_compare
10000 /* If function pointers need to be "canonicalized" before they can
10001 be reliably compared, then canonicalize them. */
10002 if (HAVE_canonicalize_funcptr_for_compare
10003 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10004 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10005 == FUNCTION_TYPE))
10007 rtx new_op0 = gen_reg_rtx (mode);
10009 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10010 op0 = new_op0;
10013 if (HAVE_canonicalize_funcptr_for_compare
10014 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10015 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10016 == FUNCTION_TYPE))
10018 rtx new_op1 = gen_reg_rtx (mode);
10020 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10021 op1 = new_op1;
10023 #endif
10025 /* Do any postincrements in the expression that was tested. */
10026 emit_queue ();
10028 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10029 ((mode == BLKmode)
10030 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10031 MIN (align0, align1) / BITS_PER_UNIT,
10032 if_false_label, if_true_label);
10035 /* Generate code to calculate EXP using a store-flag instruction
10036 and return an rtx for the result. EXP is either a comparison
10037 or a TRUTH_NOT_EXPR whose operand is a comparison.
10039 If TARGET is nonzero, store the result there if convenient.
10041 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10042 cheap.
10044 Return zero if there is no suitable set-flag instruction
10045 available on this machine.
10047 Once expand_expr has been called on the arguments of the comparison,
10048 we are committed to doing the store flag, since it is not safe to
10049 re-evaluate the expression. We emit the store-flag insn by calling
10050 emit_store_flag, but only expand the arguments if we have a reason
10051 to believe that emit_store_flag will be successful. If we think that
10052 it will, but it isn't, we have to simulate the store-flag with a
10053 set/jump/set sequence. */
10055 static rtx
10056 do_store_flag (exp, target, mode, only_cheap)
10057 tree exp;
10058 rtx target;
10059 enum machine_mode mode;
10060 int only_cheap;
10062 enum rtx_code code;
10063 tree arg0, arg1, type;
10064 tree tem;
10065 enum machine_mode operand_mode;
10066 int invert = 0;
10067 int unsignedp;
10068 rtx op0, op1;
10069 enum insn_code icode;
10070 rtx subtarget = target;
10071 rtx result, label;
10073 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10074 result at the end. We can't simply invert the test since it would
10075 have already been inverted if it were valid. This case occurs for
10076 some floating-point comparisons. */
10078 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10079 invert = 1, exp = TREE_OPERAND (exp, 0);
10081 arg0 = TREE_OPERAND (exp, 0);
10082 arg1 = TREE_OPERAND (exp, 1);
10083 type = TREE_TYPE (arg0);
10084 operand_mode = TYPE_MODE (type);
10085 unsignedp = TREE_UNSIGNED (type);
10087 /* We won't bother with BLKmode store-flag operations because it would mean
10088 passing a lot of information to emit_store_flag. */
10089 if (operand_mode == BLKmode)
10090 return 0;
10092 /* We won't bother with store-flag operations involving function pointers
10093 when function pointers must be canonicalized before comparisons. */
10094 #ifdef HAVE_canonicalize_funcptr_for_compare
10095 if (HAVE_canonicalize_funcptr_for_compare
10096 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10097 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10098 == FUNCTION_TYPE))
10099 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10100 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10101 == FUNCTION_TYPE))))
10102 return 0;
10103 #endif
10105 STRIP_NOPS (arg0);
10106 STRIP_NOPS (arg1);
10108 /* Get the rtx comparison code to use. We know that EXP is a comparison
10109 operation of some type. Some comparisons against 1 and -1 can be
10110 converted to comparisons with zero. Do so here so that the tests
10111 below will be aware that we have a comparison with zero. These
10112 tests will not catch constants in the first operand, but constants
10113 are rarely passed as the first operand. */
10115 switch (TREE_CODE (exp))
10117 case EQ_EXPR:
10118 code = EQ;
10119 break;
10120 case NE_EXPR:
10121 code = NE;
10122 break;
10123 case LT_EXPR:
10124 if (integer_onep (arg1))
10125 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10126 else
10127 code = unsignedp ? LTU : LT;
10128 break;
10129 case LE_EXPR:
10130 if (! unsignedp && integer_all_onesp (arg1))
10131 arg1 = integer_zero_node, code = LT;
10132 else
10133 code = unsignedp ? LEU : LE;
10134 break;
10135 case GT_EXPR:
10136 if (! unsignedp && integer_all_onesp (arg1))
10137 arg1 = integer_zero_node, code = GE;
10138 else
10139 code = unsignedp ? GTU : GT;
10140 break;
10141 case GE_EXPR:
10142 if (integer_onep (arg1))
10143 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10144 else
10145 code = unsignedp ? GEU : GE;
10146 break;
10148 case UNORDERED_EXPR:
10149 code = UNORDERED;
10150 break;
10151 case ORDERED_EXPR:
10152 code = ORDERED;
10153 break;
10154 case UNLT_EXPR:
10155 code = UNLT;
10156 break;
10157 case UNLE_EXPR:
10158 code = UNLE;
10159 break;
10160 case UNGT_EXPR:
10161 code = UNGT;
10162 break;
10163 case UNGE_EXPR:
10164 code = UNGE;
10165 break;
10166 case UNEQ_EXPR:
10167 code = UNEQ;
10168 break;
10170 default:
10171 abort ();
10174 /* Put a constant second. */
10175 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10177 tem = arg0; arg0 = arg1; arg1 = tem;
10178 code = swap_condition (code);
10181 /* If this is an equality or inequality test of a single bit, we can
10182 do this by shifting the bit being tested to the low-order bit and
10183 masking the result with the constant 1. If the condition was EQ,
10184 we xor it with 1. This does not require an scc insn and is faster
10185 than an scc insn even if we have it. */
10187 if ((code == NE || code == EQ)
10188 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10189 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10191 tree inner = TREE_OPERAND (arg0, 0);
10192 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10193 int ops_unsignedp;
10195 /* If INNER is a right shift by a constant and the shift count plus
10196 BITNUM does not overflow the precision of TYPE, adjust BITNUM and INNER. */
10198 if (TREE_CODE (inner) == RSHIFT_EXPR
10199 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10200 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10201 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10202 < TYPE_PRECISION (type)))
10204 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10205 inner = TREE_OPERAND (inner, 0);
10208 /* If we are going to be able to omit the AND below, we must do our
10209 operations as unsigned. If we must use the AND, we have a choice.
10210 Normally unsigned is faster, but for some machines signed is. */
10211 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10212 #ifdef LOAD_EXTEND_OP
10213 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10214 #else
10216 #endif
10219 if (subtarget == 0 || GET_CODE (subtarget) != REG
10220 || GET_MODE (subtarget) != operand_mode
10221 || ! safe_from_p (subtarget, inner, 1))
10222 subtarget = 0;
10224 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10226 if (bitnum != 0)
10227 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10228 size_int (bitnum), subtarget, ops_unsignedp);
10230 if (GET_MODE (op0) != mode)
10231 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10233 if ((code == EQ && ! invert) || (code == NE && invert))
10234 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10235 ops_unsignedp, OPTAB_LIB_WIDEN);
10237 /* Put the AND last so it can combine with more things. */
10238 if (bitnum != TYPE_PRECISION (type) - 1)
10239 op0 = expand_and (op0, const1_rtx, subtarget);
10241 return op0;
10244 /* Now see if we are likely to be able to do this. Return if not. */
10245 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10246 return 0;
10248 icode = setcc_gen_code[(int) code];
10249 if (icode == CODE_FOR_nothing
10250 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10252 /* We can only do this if it is one of the special cases that
10253 can be handled without an scc insn. */
10254 if ((code == LT && integer_zerop (arg1))
10255 || (! only_cheap && code == GE && integer_zerop (arg1)))
10257 else if (BRANCH_COST >= 0
10258 && ! only_cheap && (code == NE || code == EQ)
10259 && TREE_CODE (type) != REAL_TYPE
10260 && ((abs_optab->handlers[(int) operand_mode].insn_code
10261 != CODE_FOR_nothing)
10262 || (ffs_optab->handlers[(int) operand_mode].insn_code
10263 != CODE_FOR_nothing)))
10265 else
10266 return 0;
10269 preexpand_calls (exp);
10270 if (subtarget == 0 || GET_CODE (subtarget) != REG
10271 || GET_MODE (subtarget) != operand_mode
10272 || ! safe_from_p (subtarget, arg1, 1))
10273 subtarget = 0;
10275 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10276 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10278 if (target == 0)
10279 target = gen_reg_rtx (mode);
10281 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10282 because, if emit_store_flag does anything, it will succeed and
10283 OP0 and OP1 will not be used subsequently. */
10285 result = emit_store_flag (target, code,
10286 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10287 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10288 operand_mode, unsignedp, 1);
10290 if (result)
10292 if (invert)
10293 result = expand_binop (mode, xor_optab, result, const1_rtx,
10294 result, 0, OPTAB_LIB_WIDEN);
10295 return result;
10298 /* If this failed, we have to do this with set/compare/jump/set code. */
10299 if (GET_CODE (target) != REG
10300 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10301 target = gen_reg_rtx (GET_MODE (target));
10303 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10304 result = compare_from_rtx (op0, op1, code, unsignedp,
10305 operand_mode, NULL_RTX, 0);
10306 if (GET_CODE (result) == CONST_INT)
10307 return (((result == const0_rtx && ! invert)
10308 || (result != const0_rtx && invert))
10309 ? const0_rtx : const1_rtx);
10311 label = gen_label_rtx ();
10312 if (bcc_gen_fctn[(int) code] == 0)
10313 abort ();
10315 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10316 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10317 emit_label (label);
10319 return target;
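/* A sketch of what reaching do_store_flag looks like from the tree level
   (X_TREE and Y_TREE are hypothetical trees):

       tree cmp = build (LT_EXPR, integer_type_node, x_tree, y_tree);
       rtx flag = expand_expr (cmp, NULL_RTX, VOIDmode, 0);

   FLAG then holds 0 or 1.  When a suitable scc pattern exists the value
   comes from emit_store_flag; otherwise the set/compare/jump/set
   sequence emitted above is used.  */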
10322 /* Generate a tablejump instruction (used for switch statements). */
10324 #ifdef HAVE_tablejump
10326 /* INDEX is the value being switched on, with the lowest value
10327 in the table already subtracted.
10328 MODE is its expected mode (needed if INDEX is constant).
10329 RANGE is the length of the jump table.
10330 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10332 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10333 index value is out of range. */
10335 void
10336 do_tablejump (index, mode, range, table_label, default_label)
10337 rtx index, range, table_label, default_label;
10338 enum machine_mode mode;
10340 register rtx temp, vector;
10342 /* Do an unsigned comparison (in the proper mode) between the index
10343 expression and the value which represents the length of the range.
10344 Since we just finished subtracting the lower bound of the range
10345 from the index expression, this comparison allows us to simultaneously
10346 check that the original index expression value is both greater than
10347 or equal to the minimum value of the range and less than or equal to
10348 the maximum value of the range. */
10350 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10351 0, default_label);
10353 /* If index is in range, it must fit in Pmode.
10354 Convert to Pmode so we can index with it. */
10355 if (mode != Pmode)
10356 index = convert_to_mode (Pmode, index, 1);
10358 /* Don't let a MEM slip through, because then INDEX that comes
10359 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10360 and break_out_memory_refs will go to work on it and mess it up. */
10361 #ifdef PIC_CASE_VECTOR_ADDRESS
10362 if (flag_pic && GET_CODE (index) != REG)
10363 index = copy_to_mode_reg (Pmode, index);
10364 #endif
10366 /* If flag_force_addr were to affect this address
10367 it could interfere with the tricky assumptions made
10368 about addresses that contain label-refs,
10369 which may be valid only very near the tablejump itself. */
10370 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10371 GET_MODE_SIZE, because this indicates how large insns are. The other
10372 uses should all be Pmode, because they are addresses. This code
10373 could fail if addresses and insns are not the same size. */
10374 index = gen_rtx_PLUS (Pmode,
10375 gen_rtx_MULT (Pmode, index,
10376 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10377 gen_rtx_LABEL_REF (Pmode, table_label));
10378 #ifdef PIC_CASE_VECTOR_ADDRESS
10379 if (flag_pic)
10380 index = PIC_CASE_VECTOR_ADDRESS (index);
10381 else
10382 #endif
10383 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10384 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10385 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10386 RTX_UNCHANGING_P (vector) = 1;
10387 convert_move (temp, vector, 0);
10389 emit_jump_insn (gen_tablejump (temp, table_label));
10391 /* If we are generating PIC code or if the table is PC-relative, the
10392 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10393 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10394 emit_barrier ();
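/* A worked example of the non-PIC address arithmetic above (a sketch):
   with GET_MODE_SIZE (CASE_VECTOR_MODE) == 4, the entry fetched is

       (mem (plus (mult index (const_int 4)) (label_ref table_label)))

   i.e. the table base plus four bytes per already-rebased index; the
   entry is copied into a CASE_VECTOR_MODE register and gen_tablejump
   branches through it.  */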
10397 #endif /* HAVE_tablejump */