/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
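
/* For example, CEIL (10, 4) evaluates to 3: ten bytes occupy three
   four-byte units.  */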
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces

extern struct obstack permanent_obstack;
static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...),
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int, int));
static void store_constructor	PROTO((tree, rtx, int, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
static enum memory_use_mode
get_memory_usage_from_modifier	PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int readonly_fields_p	PROTO((tree));
static rtx expand_expr_unaligned PROTO((tree, int *));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_compare_and_jump	PROTO((tree, enum rtx_code, enum rtx_code,
				       rtx, rtx));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
/* If we are optimizing for space (-Os), cut down the default move ratio */
#define MOVE_RATIO (optimize_size ? 3 : 15)

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns        \
                                       (SIZE, ALIGN) < MOVE_RATIO)

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	    if (! HARD_REGNO_MODE_OK (regno, mode))

	    reg = gen_rtx_REG (mode, regno);

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
/* This is run at the start of compiling a function.  */

  current_function->expr
    = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

  struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
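
/* Illustrative sketch of the protocol above (not part of the original
   sources; the names Q, OLD and VAR are made up).  A caller that queues an
   increment of VAR but needs VAR's old value in an insn first might do:

       q = enqueue_insn (var, gen_add2_insn (var, GEN_INT (1)));
       old = protect_from_queue (q, 0);
       ... put OLD into an insn immediately ...
       emit_queue ();

   protect_from_queue must be applied to any rtx that might be QUEUED just
   before it goes into an instruction, and the queue must not be flushed
   between that call and the use.  */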
protect_from_queue (x, modify)
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      register rtx y = XEXP (x, 0);
      register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_COPY_ATTRIBUTES (new, x);
      MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  register rtx temp = gen_reg_rtx (GET_MODE (new));
	  emit_insn_before (gen_move_insn (temp, new),

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  register enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
/* Perform all the pending incrementations.  */

  while ((p = pending_chain))
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
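
/* Minimal usage sketch (illustrative only, names made up): widening an
   SImode register SRC into a fresh DImode register with zero-extension:

       rtx dest = gen_reg_rtx (DImode);
       convert_move (dest, src, 1);

   Both operands already carry their machine modes; UNSIGNEDP merely selects
   zero-extension over sign-extension.  */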
convert_move (to, from, unsignedp)
     register rtx to, from;
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, 0))
	  emit_unop_insn (code, to, from, UNKNOWN);
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	    libcall = extendsfdf2_libfunc;
	    libcall = extendsfxf2_libfunc;
	    libcall = extendsftf2_libfunc;
	    libcall = truncdfsf2_libfunc;
	    libcall = extenddfxf2_libfunc;
	    libcall = extenddftf2_libfunc;
	    libcall = truncxfsf2_libfunc;
	    libcall = truncxfdf2_libfunc;
	    libcall = trunctfsf2_libfunc;
	    libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
      emit_move_insn (to, value);
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
	fill_value = const0_rtx;
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	      fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PQImode)
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)
      if (to_mode != QImode)
	  from = convert_to_mode (QImode, from, unsignedp);
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
	  from = convert_to_mode (SImode, from, unsignedp);
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */

  if (to_mode == PDImode)
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)
      if (to_mode != DImode)
	  from = convert_to_mode (DImode, from, unsignedp);
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
	  emit_move_insn (to, tmp);
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
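
/* Illustrative use (not from the original file): treating a constant as an
   unsigned QImode value and widening it to SImode:

       rtx wide = convert_modes (SImode, QImode, GEN_INT (200), 1);

   Passing OLDMODE explicitly is what lets the constant be interpreted as
   unsigned here.  */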
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is the maximum alignment we can assume.  */
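
/* For instance (illustrative only), with LEN == 7 and ALIGN == 4 on a
   typical 32-bit target the loop below would emit one SImode move, then one
   HImode move, then one QImode move, always working from the widest usable
   mode down to the narrowest.  */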
1364 move_by_pieces (to
, from
, len
, align
)
1368 struct move_by_pieces data
;
1369 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1370 int max_size
= MOVE_MAX_PIECES
+ 1;
1371 enum machine_mode mode
= VOIDmode
, tmode
;
1372 enum insn_code icode
;
1375 data
.to_addr
= to_addr
;
1376 data
.from_addr
= from_addr
;
1380 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1381 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1383 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1384 || GET_CODE (from_addr
) == POST_INC
1385 || GET_CODE (from_addr
) == POST_DEC
);
1387 data
.explicit_inc_from
= 0;
1388 data
.explicit_inc_to
= 0;
1390 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1391 if (data
.reverse
) data
.offset
= len
;
1394 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1395 data
.from_struct
= MEM_IN_STRUCT_P (from
);
1396 data
.to_readonly
= RTX_UNCHANGING_P (to
);
1397 data
.from_readonly
= RTX_UNCHANGING_P (from
);
1399 /* If copying requires more than two move insns,
1400 copy addresses to registers (to make displacements shorter)
1401 and use post-increment if available. */
1402 if (!(data
.autinc_from
&& data
.autinc_to
)
1403 && move_by_pieces_ninsns (len
, align
) > 2)
1405 /* Find the mode of the largest move... */
1406 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1407 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1408 if (GET_MODE_SIZE (tmode
) < max_size
)
1411 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
1413 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1414 data
.autinc_from
= 1;
1415 data
.explicit_inc_from
= -1;
1417 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
1419 data
.from_addr
= copy_addr_to_reg (from_addr
);
1420 data
.autinc_from
= 1;
1421 data
.explicit_inc_from
= 1;
1423 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1424 data
.from_addr
= copy_addr_to_reg (from_addr
);
1425 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
1427 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1429 data
.explicit_inc_to
= -1;
1431 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
1433 data
.to_addr
= copy_addr_to_reg (to_addr
);
1435 data
.explicit_inc_to
= 1;
1437 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1438 data
.to_addr
= copy_addr_to_reg (to_addr
);
1441 if (! SLOW_UNALIGNED_ACCESS
1442 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1445 /* First move what we can in the largest integer mode, then go to
1446 successively smaller modes. */
1448 while (max_size
> 1)
1450 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1451 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1452 if (GET_MODE_SIZE (tmode
) < max_size
)
1455 if (mode
== VOIDmode
)
1458 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1459 if (icode
!= CODE_FOR_nothing
1460 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1461 GET_MODE_SIZE (mode
)))
1462 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1464 max_size
= GET_MODE_SIZE (mode
);
1467 /* The code above should have handled everything. */
1472 /* Return number of insns required to move L bytes by pieces.
1473 ALIGN (in bytes) is maximum alignment we can assume. */
1476 move_by_pieces_ninsns (l
, align
)
1480 register int n_insns
= 0;
1481 int max_size
= MOVE_MAX
+ 1;
1483 if (! SLOW_UNALIGNED_ACCESS
1484 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1487 while (max_size
> 1)
1489 enum machine_mode mode
= VOIDmode
, tmode
;
1490 enum insn_code icode
;
1492 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1493 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1494 if (GET_MODE_SIZE (tmode
) < max_size
)
1497 if (mode
== VOIDmode
)
1500 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1501 if (icode
!= CODE_FOR_nothing
1502 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1503 GET_MODE_SIZE (mode
)))
1504 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1506 max_size
= GET_MODE_SIZE (mode
);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
1517 move_by_pieces_1 (genfun
, mode
, data
)
1518 rtx (*genfun
) PROTO ((rtx
, ...));
1519 enum machine_mode mode
;
1520 struct move_by_pieces
*data
;
1522 register int size
= GET_MODE_SIZE (mode
);
1523 register rtx to1
, from1
;
1525 while (data
->len
>= size
)
1527 if (data
->reverse
) data
->offset
-= size
;
1529 to1
= (data
->autinc_to
1530 ? gen_rtx_MEM (mode
, data
->to_addr
)
1531 : copy_rtx (change_address (data
->to
, mode
,
1532 plus_constant (data
->to_addr
,
1534 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
1535 RTX_UNCHANGING_P (to1
) = data
->to_readonly
;
1538 = (data
->autinc_from
1539 ? gen_rtx_MEM (mode
, data
->from_addr
)
1540 : copy_rtx (change_address (data
->from
, mode
,
1541 plus_constant (data
->from_addr
,
1543 MEM_IN_STRUCT_P (from1
) = data
->from_struct
;
1544 RTX_UNCHANGING_P (from1
) = data
->from_readonly
;
1546 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1547 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1548 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1549 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1551 emit_insn ((*genfun
) (to1
, from1
));
1552 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1553 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1554 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1555 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1557 if (! data
->reverse
) data
->offset
+= size
;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,

   Return the address of the new block, if memcpy is called and returns it,
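
/* Call sketch (illustrative only): copying N bytes between two BLKmode MEMs
   with a known 4-byte alignment would look like

       retval = emit_block_move (dest, src, GEN_INT (n), 4);

   where DEST, SRC and N come from the caller.  */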
1577 emit_block_move (x
, y
, size
, align
)
1583 #ifdef TARGET_MEM_FUNCTIONS
1585 tree call_expr
, arg_list
;
1588 if (GET_MODE (x
) != BLKmode
)
1591 if (GET_MODE (y
) != BLKmode
)
1594 x
= protect_from_queue (x
, 1);
1595 y
= protect_from_queue (y
, 0);
1596 size
= protect_from_queue (size
, 0);
1598 if (GET_CODE (x
) != MEM
)
1600 if (GET_CODE (y
) != MEM
)
1605 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1606 move_by_pieces (x
, y
, INTVAL (size
), align
);
1609 /* Try the most limited insn first, because there's no point
1610 including more than one in the machine description unless
1611 the more limited one has some advantage. */
1613 rtx opalign
= GEN_INT (align
);
1614 enum machine_mode mode
;
1616 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1617 mode
= GET_MODE_WIDER_MODE (mode
))
1619 enum insn_code code
= movstr_optab
[(int) mode
];
1620 insn_operand_predicate_fn pred
;
1622 if (code
!= CODE_FOR_nothing
1623 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1624 here because if SIZE is less than the mode mask, as it is
1625 returned by the macro, it will definitely be less than the
1626 actual mode mask. */
1627 && ((GET_CODE (size
) == CONST_INT
1628 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1629 <= (GET_MODE_MASK (mode
) >> 1)))
1630 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1631 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1632 || (*pred
) (x
, BLKmode
))
1633 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1634 || (*pred
) (y
, BLKmode
))
1635 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1636 || (*pred
) (opalign
, VOIDmode
)))
1639 rtx last
= get_last_insn ();
1642 op2
= convert_to_mode (mode
, size
, 1);
1643 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1644 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1645 op2
= copy_to_mode_reg (mode
, op2
);
1647 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1654 delete_insns_since (last
);
1658 /* X, Y, or SIZE may have been passed through protect_from_queue.
1660 It is unsafe to save the value generated by protect_from_queue
1661 and reuse it later. Consider what happens if emit_queue is
1662 called before the return value from protect_from_queue is used.
1664 Expansion of the CALL_EXPR below will call emit_queue before
1665 we are finished emitting RTL for argument setup. So if we are
1666 not careful we could get the wrong value for an argument.
1668 To avoid this problem we go ahead and emit code to copy X, Y &
1669 SIZE into new pseudos. We can then place those new pseudos
1670 into an RTL_EXPR and use them later, even after a call to
1673 Note this is not strictly needed for library calls since they
1674 do not call emit_queue before loading their arguments. However,
1675 we may need to have library calls call emit_queue in the future
1676 since failing to do so could cause problems for targets which
1677 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1678 x
= copy_to_mode_reg (Pmode
, XEXP (x
, 0));
1679 y
= copy_to_mode_reg (Pmode
, XEXP (y
, 0));
1681 #ifdef TARGET_MEM_FUNCTIONS
1682 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
1684 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1685 TREE_UNSIGNED (integer_type_node
));
1686 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
1689 #ifdef TARGET_MEM_FUNCTIONS
1690 /* It is incorrect to use the libcall calling conventions to call
1691 memcpy in this context.
1693 This could be a user call to memcpy and the user may wish to
1694 examine the return value from memcpy.
1696 For targets where libcalls and normal calls have different conventions
1697 for returning pointers, we could end up generating incorrect code.
1699 So instead of using a libcall sequence we build up a suitable
1700 CALL_EXPR and expand the call in the normal fashion. */
1701 if (fn
== NULL_TREE
)
1705 /* This was copied from except.c, I don't know if all this is
1706 necessary in this context or not. */
1707 fn
= get_identifier ("memcpy");
1708 push_obstacks_nochange ();
1709 end_temporary_allocation ();
1710 fntype
= build_pointer_type (void_type_node
);
1711 fntype
= build_function_type (fntype
, NULL_TREE
);
1712 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
1713 ggc_add_tree_root (&fn
, 1);
1714 DECL_EXTERNAL (fn
) = 1;
1715 TREE_PUBLIC (fn
) = 1;
1716 DECL_ARTIFICIAL (fn
) = 1;
1717 make_decl_rtl (fn
, NULL_PTR
, 1);
1718 assemble_external (fn
);
1722 /* We need to make an argument list for the function call.
1724 memcpy has three arguments, the first two are void * addresses and
1725 the last is a size_t byte count for the copy. */
1727 = build_tree_list (NULL_TREE
,
1728 make_tree (build_pointer_type (void_type_node
), x
));
1729 TREE_CHAIN (arg_list
)
1730 = build_tree_list (NULL_TREE
,
1731 make_tree (build_pointer_type (void_type_node
), y
));
1732 TREE_CHAIN (TREE_CHAIN (arg_list
))
1733 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
1734 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
1736 /* Now we have to build up the CALL_EXPR itself. */
1737 call_expr
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
1738 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
1739 call_expr
, arg_list
, NULL_TREE
);
1740 TREE_SIDE_EFFECTS (call_expr
) = 1;
1742 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
1744 emit_library_call (bcopy_libfunc
, 0,
1745 VOIDmode
, 3, y
, Pmode
, x
, Pmode
,
1746 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1747 TREE_UNSIGNED (integer_type_node
)),
1748 TYPE_MODE (integer_type_node
));
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
1759 move_block_to_reg (regno
, x
, nregs
, mode
)
1763 enum machine_mode mode
;
1766 #ifdef HAVE_load_multiple
1774 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1775 x
= validize_mem (force_const_mem (mode
, x
));
1777 /* See if the machine can do this with a load multiple insn. */
1778 #ifdef HAVE_load_multiple
1779 if (HAVE_load_multiple
)
1781 last
= get_last_insn ();
1782 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1790 delete_insns_since (last
);
1794 for (i
= 0; i
< nregs
; i
++)
1795 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1796 operand_subword_force (x
, i
, mode
));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */
1805 move_block_from_reg (regno
, x
, nregs
, size
)
1812 #ifdef HAVE_store_multiple
1816 enum machine_mode mode
;
1818 /* If SIZE is that of a mode no bigger than a word, just use that
1819 mode's store operation. */
1820 if (size
<= UNITS_PER_WORD
1821 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1823 emit_move_insn (change_address (x
, mode
, NULL
),
1824 gen_rtx_REG (mode
, regno
));
1828 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1829 to the left before storing to memory. Note that the previous test
1830 doesn't handle all cases (e.g. SIZE == 3). */
1831 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1833 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1839 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1840 gen_rtx_REG (word_mode
, regno
),
1841 build_int_2 ((UNITS_PER_WORD
- size
)
1842 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1843 emit_move_insn (tem
, shift
);
1847 /* See if the machine can do this with a store multiple insn. */
1848 #ifdef HAVE_store_multiple
1849 if (HAVE_store_multiple
)
1851 last
= get_last_insn ();
1852 pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1860 delete_insns_since (last
);
1864 for (i
= 0; i
< nregs
; i
++)
1866 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1871 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of

/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING

emit_group_load (dst, orig_src, ssize, align)
1894 if (GET_CODE (dst
) != PARALLEL
)
1897 /* Check for a NULL entry, used to indicate that the parameter goes
1898 both on the stack and in registers. */
1899 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1904 tmps
= (rtx
*) alloca (sizeof(rtx
) * XVECLEN (dst
, 0));
1906 /* If we won't be loading directly from memory, protect the real source
1907 from strange tricks we might play. */
1909 if (GET_CODE (src
) != MEM
)
1911 src
= gen_reg_rtx (GET_MODE (orig_src
));
1912 emit_move_insn (src
, orig_src
);
1915 /* Process the pieces. */
1916 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1918 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1919 int bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1920 int bytelen
= GET_MODE_SIZE (mode
);
1923 /* Handle trailing fragments that run over the size of the struct. */
1924 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
1926 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1927 bytelen
= ssize
- bytepos
;
1932 /* Optimize the access just a bit. */
1933 if (GET_CODE (src
) == MEM
1934 && align
*BITS_PER_UNIT
>= GET_MODE_ALIGNMENT (mode
)
1935 && bytepos
*BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1936 && bytelen
== GET_MODE_SIZE (mode
))
1938 tmps
[i
] = gen_reg_rtx (mode
);
1939 emit_move_insn (tmps
[i
],
1940 change_address (src
, mode
,
1941 plus_constant (XEXP (src
, 0),
1944 else if (GET_CODE (src
) == CONCAT
)
1947 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0))))
1948 tmps
[i
] = XEXP (src
, 0);
1949 else if (bytepos
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
1950 && bytelen
== GET_MODE_SIZE (GET_MODE (XEXP (src
, 1))))
1951 tmps
[i
] = XEXP (src
, 1);
1957 tmps
[i
] = extract_bit_field (src
, bytelen
*BITS_PER_UNIT
,
1958 bytepos
*BITS_PER_UNIT
, 1, NULL_RTX
,
1959 mode
, mode
, align
, ssize
);
1962 if (BYTES_BIG_ENDIAN
&& shift
)
1964 expand_binop (mode
, ashl_optab
, tmps
[i
], GEN_INT (shift
),
1965 tmps
[i
], 0, OPTAB_WIDEN
);
1970 /* Copy the extracted pieces into the proper (probable) hard regs. */
1971 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1972 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0), tmps
[i
]);
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

emit_group_store (orig_dst, src, ssize, align)
1987 if (GET_CODE (src
) != PARALLEL
)
1990 /* Check for a NULL entry, used to indicate that the parameter goes
1991 both on the stack and in registers. */
1992 if (XEXP (XVECEXP (src
, 0, 0), 0))
1997 tmps
= (rtx
*) alloca (sizeof(rtx
) * XVECLEN (src
, 0));
1999 /* Copy the (probable) hard regs into pseudos. */
2000 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2002 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2003 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2004 emit_move_insn (tmps
[i
], reg
);
2008 /* If we won't be storing directly into memory, protect the real destination
2009 from strange tricks we might play. */
2011 if (GET_CODE (dst
) == PARALLEL
)
2015 /* We can get a PARALLEL dst if there is a conditional expression in
2016 a return statement. In that case, the dst and src are the same,
2017 so no action is necessary. */
2018 if (rtx_equal_p (dst
, src
))
2021 /* It is unclear if we can ever reach here, but we may as well handle
2022 it. Allocate a temporary, and split this into a store/load to/from
2025 temp
= assign_stack_temp (GET_MODE (dst
), ssize
, 0);
2026 emit_group_store (temp
, src
, ssize
, align
);
2027 emit_group_load (dst
, temp
, ssize
, align
);
2030 else if (GET_CODE (dst
) != MEM
)
2032 dst
= gen_reg_rtx (GET_MODE (orig_dst
));
2033 /* Make life a bit easier for combine. */
2034 emit_move_insn (dst
, const0_rtx
);
2036 else if (! MEM_IN_STRUCT_P (dst
))
2038 /* store_bit_field requires that memory operations have
2039 mem_in_struct_p set; we might not. */
2041 dst
= copy_rtx (orig_dst
);
2042 MEM_SET_IN_STRUCT_P (dst
, 1);
2045 /* Process the pieces. */
2046 for (i
= start
; i
< XVECLEN (src
, 0); i
++)
2048 int bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2049 enum machine_mode mode
= GET_MODE (tmps
[i
]);
2050 int bytelen
= GET_MODE_SIZE (mode
);
2052 /* Handle trailing fragments that run over the size of the struct. */
2053 if (ssize
>= 0 && bytepos
+ bytelen
> ssize
)
2055 if (BYTES_BIG_ENDIAN
)
2057 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2058 expand_binop (mode
, ashr_optab
, tmps
[i
], GEN_INT (shift
),
2059 tmps
[i
], 0, OPTAB_WIDEN
);
2061 bytelen
= ssize
- bytepos
;
2064 /* Optimize the access just a bit. */
2065 if (GET_CODE (dst
) == MEM
2066 && align
*BITS_PER_UNIT
>= GET_MODE_ALIGNMENT (mode
)
2067 && bytepos
*BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2068 && bytelen
== GET_MODE_SIZE (mode
))
2070 emit_move_insn (change_address (dst
, mode
,
2071 plus_constant (XEXP (dst
, 0),
2077 store_bit_field (dst
, bytelen
*BITS_PER_UNIT
, bytepos
*BITS_PER_UNIT
,
2078 mode
, tmps
[i
], align
, ssize
);
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (GET_CODE (dst
) == REG
)
2085 emit_move_insn (orig_dst
, dst
);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */
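
/* Worked example (illustrative, assuming a 32-bit big-endian target): a
   6-byte structure has bytes % UNITS_PER_WORD == 2, so the big-endian
   correction computed below is 32 - 2 * 8 == 16 bits; the first extraction
   from the source word therefore starts 16 bits in, skipping the unused
   high-order bytes.  */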
2099 copy_blkmode_from_reg(tgtblk
,srcreg
,type
)
2104 int bytes
= int_size_in_bytes (type
);
2105 rtx src
= NULL
, dst
= NULL
;
2106 int bitsize
= MIN (TYPE_ALIGN (type
), (unsigned int) BITS_PER_WORD
);
2107 int bitpos
, xbitpos
, big_endian_correction
= 0;
2111 tgtblk
= assign_stack_temp (BLKmode
, bytes
, 0);
2112 MEM_SET_IN_STRUCT_P (tgtblk
, AGGREGATE_TYPE_P (type
));
2113 preserve_temp_slots (tgtblk
);
2116 /* This code assumes srcreg is at least a full word. If it isn't,
2117 copy it into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg
) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg
)) < UNITS_PER_WORD
)
2120 srcreg
= convert_to_mode (word_mode
, srcreg
,
2121 TREE_UNSIGNED (type
));
2123 /* Structures whose size is not a multiple of a word are aligned
2124 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2125 machine, this means we must skip the empty high order bytes when
2126 calculating the bit offset. */
2127 if (BYTES_BIG_ENDIAN
&& bytes
% UNITS_PER_WORD
)
2128 big_endian_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines
     which do not use strict alignment, but it doesn't seem
     worth the effort at the current time.  */
2136 for (bitpos
= 0, xbitpos
= big_endian_correction
;
2137 bitpos
< bytes
* BITS_PER_UNIT
;
2138 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2141 /* We need a new source operand each time xbitpos is on a
2142 word boundary and when xbitpos == big_endian_correction
2143 (the first time through). */
2144 if (xbitpos
% BITS_PER_WORD
== 0
2145 || xbitpos
== big_endian_correction
)
2146 src
= operand_subword_force (srcreg
,
2147 xbitpos
/ BITS_PER_WORD
,
2150 /* We need a new destination operand each time bitpos is on
2152 if (bitpos
% BITS_PER_WORD
== 0)
2153 dst
= operand_subword (tgtblk
, bitpos
/ BITS_PER_WORD
, 1, BLKmode
);
2155 /* Use xbitpos for the source extraction (right justified) and
2156 xbitpos for the destination store (left justified). */
2157 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, word_mode
,
2158 extract_bit_field (src
, bitsize
,
2159 xbitpos
% BITS_PER_WORD
, 1,
2160 NULL_RTX
, word_mode
,
2162 bitsize
/ BITS_PER_UNIT
,
2164 bitsize
/ BITS_PER_UNIT
, BITS_PER_WORD
);
2170 /* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
2174 use_reg (call_fusage
, reg
)
2175 rtx
*call_fusage
, reg
;
2177 if (GET_CODE (reg
) != REG
2178 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2182 = gen_rtx_EXPR_LIST (VOIDmode
,
2183 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2190 use_regs (call_fusage
, regno
, nregs
)
2197 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2200 for (i
= 0; i
< nregs
; i
++)
2201 use_reg (call_fusage
, gen_rtx_REG (reg_raw_mode
[regno
+ i
], regno
+ i
));
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209 use_group_regs (call_fusage
, regs
)
2215 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2217 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2223 use_reg (call_fusage
, reg
);
2227 /* Generate several move instructions to clear LEN bytes of block TO.
2228 (A MEM rtx with BLKmode). The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is the maximum
   alignment we can assume.  */
2233 clear_by_pieces (to
, len
, align
)
2237 struct clear_by_pieces data
;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;
2244 data
.to_addr
= to_addr
;
2247 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2248 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2250 data
.explicit_inc_to
= 0;
2252 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2253 if (data
.reverse
) data
.offset
= len
;
2256 data
.to_struct
= MEM_IN_STRUCT_P (to
);
2258 /* If copying requires more than two move insns,
2259 copy addresses to registers (to make displacements shorter)
2260 and use post-increment if available. */
2262 && move_by_pieces_ninsns (len
, align
) > 2)
2264 /* Determine the main mode we'll be using */
2265 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2266 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2267 if (GET_MODE_SIZE (tmode
) < max_size
)
2270 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
2272 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
2274 data
.explicit_inc_to
= -1;
2276 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
2278 data
.to_addr
= copy_addr_to_reg (to_addr
);
2280 data
.explicit_inc_to
= 1;
2282 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
2283 data
.to_addr
= copy_addr_to_reg (to_addr
);
2286 if (! SLOW_UNALIGNED_ACCESS
2287 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
2290 /* First move what we can in the largest integer mode, then go to
2291 successively smaller modes. */
2293 while (max_size
> 1)
2295 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2296 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2297 if (GET_MODE_SIZE (tmode
) < max_size
)
2300 if (mode
== VOIDmode
)
      icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode
!= CODE_FOR_nothing
2305 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
2306 GET_MODE_SIZE (mode
)))
2307 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
2309 max_size
= GET_MODE_SIZE (mode
);
2312 /* The code above should have handled everything. */
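/* As an illustration (hypothetical target): if MOVE_MAX_PIECES is 8 and
   DImode, HImode and QImode stores are available and the block is
   sufficiently aligned, clearing an 11-byte block with the loops above
   emits one DImode, one HImode and one QImode store of zero
   (8 + 2 + 1 bytes).  */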
2317 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2318 with move instructions for mode MODE. GENFUN is the gen_... function
2319 to make a move insn for that mode. DATA has all the other info. */
2322 clear_by_pieces_1 (genfun
, mode
, data
)
2323 rtx (*genfun
) PROTO ((rtx
, ...));
2324 enum machine_mode mode
;
2325 struct clear_by_pieces
*data
;
  register int size = GET_MODE_SIZE (mode);
2330 while (data
->len
>= size
)
2332 if (data
->reverse
) data
->offset
-= size
;
2334 to1
= (data
->autinc_to
2335 ? gen_rtx_MEM (mode
, data
->to_addr
)
2336 : copy_rtx (change_address (data
->to
, mode
,
2337 plus_constant (data
->to_addr
,
2339 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
2341 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2342 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2344 emit_insn ((*genfun
) (to1
, const0_rtx
));
2345 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2346 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2348 if (! data
->reverse
) data
->offset
+= size
;
2354 /* Write zeros through the storage of OBJECT.
2355 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.
2358 If we call a function that returns the length of the block, return it. */
2361 clear_storage (object
, size
, align
)
2366 #ifdef TARGET_MEM_FUNCTIONS
2368 tree call_expr
, arg_list
;
2372 if (GET_MODE (object
) == BLKmode
)
2374 object
= protect_from_queue (object
, 1);
2375 size
= protect_from_queue (size
, 0);
2377 if (GET_CODE (size
) == CONST_INT
2378 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2379 clear_by_pieces (object
, INTVAL (size
), align
);
2383 /* Try the most limited insn first, because there's no point
2384 including more than one in the machine description unless
2385 the more limited one has some advantage. */
2387 rtx opalign
= GEN_INT (align
);
2388 enum machine_mode mode
;
2390 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2391 mode
= GET_MODE_WIDER_MODE (mode
))
2393 enum insn_code code
= clrstr_optab
[(int) mode
];
2394 insn_operand_predicate_fn pred
;
2396 if (code
!= CODE_FOR_nothing
2397 /* We don't need MODE to be narrower than
2398 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2399 the mode mask, as it is returned by the macro, it will
2400 definitely be less than the actual mode mask. */
2401 && ((GET_CODE (size
) == CONST_INT
2402 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2403 <= (GET_MODE_MASK (mode
) >> 1)))
2404 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2405 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2406 || (*pred
) (object
, BLKmode
))
2407 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2408 || (*pred
) (opalign
, VOIDmode
)))
2411 rtx last
= get_last_insn ();
2414 op1
= convert_to_mode (mode
, size
, 1);
2415 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2416 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2417 op1
= copy_to_mode_reg (mode
, op1
);
2419 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2426 delete_insns_since (last
);
2430 /* OBJECT or SIZE may have been passed through protect_from_queue.
2432 It is unsafe to save the value generated by protect_from_queue
2433 and reuse it later. Consider what happens if emit_queue is
2434 called before the return value from protect_from_queue is used.
2436 Expansion of the CALL_EXPR below will call emit_queue before
2437 we are finished emitting RTL for argument setup. So if we are
2438 not careful we could get the wrong value for an argument.
2440 To avoid this problem we go ahead and emit code to copy OBJECT
2441 and SIZE into new pseudos. We can then place those new pseudos
2442 into an RTL_EXPR and use them later, even after a call to
2445 Note this is not strictly needed for library calls since they
2446 do not call emit_queue before loading their arguments. However,
2447 we may need to have library calls call emit_queue in the future
2448 since failing to do so could cause problems for targets which
2449 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2450 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2452 #ifdef TARGET_MEM_FUNCTIONS
2453 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2455 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2456 TREE_UNSIGNED (integer_type_node
));
2457 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 /* It is incorrect to use the libcall calling conventions to call
2463 memset in this context.
2465 This could be a user call to memset and the user may wish to
2466 examine the return value from memset.
2468 For targets where libcalls and normal calls have different
2469 conventions for returning pointers, we could end up generating
2472 So instead of using a libcall sequence we build up a suitable
2473 CALL_EXPR and expand the call in the normal fashion. */
2474 if (fn
== NULL_TREE
)
	  /* This was copied from except.c; I don't know whether all of
	     this is necessary in this context or not.  */
2480 fn
= get_identifier ("memset");
2481 push_obstacks_nochange ();
2482 end_temporary_allocation ();
2483 fntype
= build_pointer_type (void_type_node
);
2484 fntype
= build_function_type (fntype
, NULL_TREE
);
2485 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2486 ggc_add_tree_root (&fn
, 1);
2487 DECL_EXTERNAL (fn
) = 1;
2488 TREE_PUBLIC (fn
) = 1;
2489 DECL_ARTIFICIAL (fn
) = 1;
2490 make_decl_rtl (fn
, NULL_PTR
, 1);
2491 assemble_external (fn
);
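	  /* For illustration: the declaration built above corresponds to
	     the C-level prototype `extern void *memset ();', i.e. a
	     function returning void * with unspecified arguments, which
	     is all the CALL_EXPR expansion below needs.  */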
2495 /* We need to make an argument list for the function call.
	 memset has three arguments: the first is a void * address, the
	 second an integer with the initialization value, and the last is
	 a size_t byte count for the copy.  */
2501 = build_tree_list (NULL_TREE
,
2502 make_tree (build_pointer_type (void_type_node
),
2504 TREE_CHAIN (arg_list
)
2505 = build_tree_list (NULL_TREE
,
2506 make_tree (integer_type_node
, const0_rtx
));
2507 TREE_CHAIN (TREE_CHAIN (arg_list
))
2508 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2509 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
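	  /* For illustration: the argument list built above corresponds
	     to the source-level call `memset (object, 0, size)', with the
	     second argument fixed at zero because we are clearing the
	     block.  */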
2511 /* Now we have to build up the CALL_EXPR itself. */
2512 call_expr
= build1 (ADDR_EXPR
,
2513 build_pointer_type (TREE_TYPE (fn
)), fn
);
2514 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2515 call_expr
, arg_list
, NULL_TREE
);
2516 TREE_SIDE_EFFECTS (call_expr
) = 1;
2518 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2520 emit_library_call (bzero_libfunc
, 0,
2521 VOIDmode
, 2, object
, Pmode
, size
,
2522 TYPE_MODE (integer_type_node
));
2527 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2532 /* Generate code to copy Y into X.
2533 Both Y and X must have the same mode, except that
2534 Y can be a constant with VOIDmode.
2535 This mode cannot be BLKmode; use emit_block_move for that.
2537 Return the last instruction emitted. */
2540 emit_move_insn (x
, y
)
2543 enum machine_mode mode
= GET_MODE (x
);
2545 x
= protect_from_queue (x
, 1);
2546 y
= protect_from_queue (y
, 0);
2548 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2551 /* Never force constant_p_rtx to memory. */
2552 if (GET_CODE (y
) == CONSTANT_P_RTX
)
2554 else if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2555 y
= force_const_mem (mode
, y
);
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
2559 if (GET_CODE (x
) == MEM
2560 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2561 && ! push_operand (x
, GET_MODE (x
)))
2563 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2564 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2566 if (GET_CODE (y
) == MEM
2567 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2569 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2570 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2572 if (mode
== BLKmode
)
2575 return emit_move_insn_1 (x
, y
);
2578 /* Low level part of emit_move_insn.
2579 Called just like emit_move_insn, but assumes X and Y
2580 are basically valid. */
2583 emit_move_insn_1 (x
, y
)
2586 enum machine_mode mode
= GET_MODE (x
);
2587 enum machine_mode submode
;
2588 enum mode_class
class = GET_MODE_CLASS (mode
);
2591 if (mode
>= MAX_MACHINE_MODE
)
2594 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2596 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2598 /* Expand complex moves by moving real part and imag part, if possible. */
2599 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2600 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2602 (class == MODE_COMPLEX_INT
2603 ? MODE_INT
: MODE_FLOAT
),
2605 && (mov_optab
->handlers
[(int) submode
].insn_code
2606 != CODE_FOR_nothing
))
2608 /* Don't split destination if it is a stack push. */
2609 int stack
= push_operand (x
, GET_MODE (x
));
2611 /* If this is a stack, push the highpart first, so it
2612 will be in the argument order.
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620 #ifdef STACK_GROWS_DOWNWARD
2621 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2622 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2623 gen_imagpart (submode
, y
)));
2624 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2625 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2626 gen_realpart (submode
, y
)));
2628 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2629 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2630 gen_realpart (submode
, y
)));
2631 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2632 (gen_rtx_MEM (submode
, (XEXP (x
, 0))),
2633 gen_imagpart (submode
, y
)));
2638 /* If this is a complex value with each part being smaller than a
2639 word, the usual calling sequence will likely pack the pieces into
2640 a single register. Unfortunately, SUBREG of hard registers only
2641 deals in terms of words, so we have a problem converting input
2642 arguments to the CONCAT of two registers that is used elsewhere
2643 for complex values. If this is before reload, we can copy it into
2644 memory and reload. FIXME, we should see about using extract and
2645 insert on integer registers, but complex short and complex char
2646 variables should be rarely used. */
2647 if (GET_MODE_BITSIZE (mode
) < 2*BITS_PER_WORD
2648 && (reload_in_progress
| reload_completed
) == 0)
2650 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2651 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2653 if (packed_dest_p
|| packed_src_p
)
2655 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2656 ? MODE_FLOAT
: MODE_INT
);
2658 enum machine_mode reg_mode
=
2659 mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2661 if (reg_mode
!= BLKmode
)
2663 rtx mem
= assign_stack_temp (reg_mode
,
2664 GET_MODE_SIZE (mode
), 0);
2666 rtx cmem
= change_address (mem
, mode
, NULL_RTX
);
2668 current_function
->cannot_inline
2669 = "function uses short complex types";
2673 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2674 emit_move_insn_1 (cmem
, y
);
2675 return emit_move_insn_1 (sreg
, mem
);
2679 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2680 emit_move_insn_1 (mem
, sreg
);
2681 return emit_move_insn_1 (x
, cmem
);
2687 /* Show the output dies here. This is necessary for pseudos;
2688 hard regs shouldn't appear here except as return values.
2689 We never want to emit such a clobber after reload. */
2691 && ! (reload_in_progress
|| reload_completed
))
2693 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2696 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2697 (gen_realpart (submode
, x
), gen_realpart (submode
, y
)));
2698 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2699 (gen_imagpart (submode
, x
), gen_imagpart (submode
, y
)));
2702 return get_last_insn ();
2705 /* This will handle any multi-word mode that lacks a move_insn pattern.
2706 However, you will get better code if you define such patterns,
2707 even if they must turn into multiple assembler instructions. */
2708 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2712 #ifdef PUSH_ROUNDING
2714 /* If X is a push on the stack, do the push now and replace
2715 X with a reference to the stack pointer. */
2716 if (push_operand (x
, GET_MODE (x
)))
2718 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2719 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2723 /* Show the output dies here. This is necessary for pseudos;
2724 hard regs shouldn't appear here except as return values.
2725 We never want to emit such a clobber after reload. */
2727 && ! (reload_in_progress
|| reload_completed
))
2729 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2733 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2736 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2737 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2739 /* If we can't get a part of Y, put Y into memory if it is a
2740 constant. Otherwise, force it into a register. If we still
2741 can't get a part of Y, abort. */
2742 if (ypart
== 0 && CONSTANT_P (y
))
2744 y
= force_const_mem (mode
, y
);
2745 ypart
= operand_subword (y
, i
, 1, mode
);
2747 else if (ypart
== 0)
2748 ypart
= operand_subword_force (y
, i
, mode
);
2750 if (xpart
== 0 || ypart
== 0)
2753 last_insn
= emit_move_insn (xpart
, ypart
);
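      /* As an illustration: on a hypothetical 32-bit target with no
	 DImode move pattern, a DImode move reaches this loop and is
	 emitted as two SImode moves, one per word of X and Y.  */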
2762 /* Pushing data onto the stack. */
2764 /* Push a block of length SIZE (perhaps variable)
2765 and return an rtx to address the beginning of the block.
2766 Note that it is not possible for the value returned to be a QUEUED.
2767 The value may be virtual_outgoing_args_rtx.
2769 EXTRA is the number of bytes of padding to push in addition to SIZE.
2770 BELOW nonzero means this padding comes at low addresses;
2771 otherwise, the padding comes at high addresses. */
2774 push_block (size
, extra
, below
)
2780 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2781 if (CONSTANT_P (size
))
2782 anti_adjust_stack (plus_constant (size
, extra
));
2783 else if (GET_CODE (size
) == REG
&& extra
== 0)
2784 anti_adjust_stack (size
);
2787 rtx temp
= copy_to_mode_reg (Pmode
, size
);
2789 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2790 temp
, 0, OPTAB_LIB_WIDEN
);
2791 anti_adjust_stack (temp
);
2794 #if defined (STACK_GROWS_DOWNWARD) \
2795 || (defined (ARGS_GROW_DOWNWARD) \
2796 && !defined (ACCUMULATE_OUTGOING_ARGS))
2798 /* Return the lowest stack address when STACK or ARGS grow downward and
     we are not accumulating outgoing arguments (the c4x port uses such
     conventions).  */
2801 temp
= virtual_outgoing_args_rtx
;
2802 if (extra
!= 0 && below
)
2803 temp
= plus_constant (temp
, extra
);
2805 if (GET_CODE (size
) == CONST_INT
)
2806 temp
= plus_constant (virtual_outgoing_args_rtx
,
2807 - INTVAL (size
) - (below
? 0 : extra
));
2808 else if (extra
!= 0 && !below
)
2809 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2810 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2812 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
2813 negate_rtx (Pmode
, size
));
2816 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
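  /* As an illustration: with a downward-growing stack, a constant SIZE
     of 16 and EXTRA of 0, the address computed above is
     virtual_outgoing_args_rtx - 16, the lowest address of the block just
     allocated.  */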
2822 return gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2826 block of SIZE bytes. */
2829 get_push_address (size
)
2834 if (STACK_PUSH_CODE
== POST_DEC
)
2835 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
2836 else if (STACK_PUSH_CODE
== POST_INC
)
2837 temp
= gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
2839 temp
= stack_pointer_rtx
;
2841 return copy_to_reg (temp
);
2844 /* Generate code to push X onto the stack, assuming it has mode MODE and
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
2848 SIZE is an rtx for the size of data to be copied (in bytes),
2849 needed only if X is BLKmode.
2851 ALIGN (in bytes) is maximum alignment we can assume.
2853 If PARTIAL and REG are both nonzero, then copy that many of the first
2854 words of X into registers starting with REG, and push the rest of X.
2855 The amount of space pushed is decreased by PARTIAL words,
2856 rounded *down* to a multiple of PARM_BOUNDARY.
2857 REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
2862 EXTRA is the amount in bytes of extra space to leave next to this arg.
2863 This is ignored if an argument block has already been allocated.
2865 On a machine that lacks real push insns, ARGS_ADDR is the address of
2866 the bottom of the argument block for this call. We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2868 argument block has not been preallocated.
2870 ARGS_SO_FAR is the size of args previously pushed for this call.
2872 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2873 for arguments passed in registers. If nonzero, it will be the number
2874 of bytes required. */
2877 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2878 args_addr
, args_so_far
, reg_parm_stack_space
,
2881 enum machine_mode mode
;
2890 int reg_parm_stack_space
;
2894 enum direction stack_direction
2895 #ifdef STACK_GROWS_DOWNWARD
2901 /* Decide where to pad the argument: `downward' for below,
2902 `upward' for above, or `none' for don't pad it.
2903 Default is below for small data on big-endian machines; else above. */
2904 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2906 /* Invert direction if stack is post-update. */
2907 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2908 if (where_pad
!= none
)
2909 where_pad
= (where_pad
== downward
? upward
: downward
);
2911 xinner
= x
= protect_from_queue (x
, 0);
2913 if (mode
== BLKmode
)
2915 /* Copy a block into the stack, entirely or partially. */
2918 int used
= partial
* UNITS_PER_WORD
;
2919 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2927 /* USED is now the # of bytes we need not copy to the stack
2928 because registers will take care of them. */
2931 xinner
= change_address (xinner
, BLKmode
,
2932 plus_constant (XEXP (xinner
, 0), used
));
2934 /* If the partial register-part of the arg counts in its stack size,
2935 skip the part of stack space corresponding to the registers.
2936 Otherwise, start copying to the beginning of the stack space,
2937 by setting SKIP to 0. */
2938 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
2940 #ifdef PUSH_ROUNDING
2941 /* Do it with several push insns if that doesn't take lots of insns
2942 and if there is no difficulty with push insns that skip bytes
2943 on the stack for alignment purposes. */
2945 && GET_CODE (size
) == CONST_INT
2947 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
2948 /* Here we avoid the case of a structure whose weak alignment
2949 forces many pushes of a small amount of data,
2950 and such small pushes do rounding that causes trouble. */
2951 && ((! SLOW_UNALIGNED_ACCESS
)
2952 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
2953 || PUSH_ROUNDING (align
) == align
)
2954 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
2956 /* Push padding now if padding above and stack grows down,
2957 or if padding below and stack grows up.
2958 But if space already allocated, this has already been done. */
2959 if (extra
&& args_addr
== 0
2960 && where_pad
!= none
&& where_pad
!= stack_direction
)
2961 anti_adjust_stack (GEN_INT (extra
));
2963 move_by_pieces (gen_rtx_MEM (BLKmode
, gen_push_operand ()), xinner
,
2964 INTVAL (size
) - used
, align
);
2966 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
2970 in_check_memory_usage
= 1;
2971 temp
= get_push_address (INTVAL(size
) - used
);
2972 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
2973 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
2975 XEXP (xinner
, 0), Pmode
,
2976 GEN_INT (INTVAL(size
) - used
),
2977 TYPE_MODE (sizetype
));
2979 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
2981 GEN_INT (INTVAL(size
) - used
),
2982 TYPE_MODE (sizetype
),
2983 GEN_INT (MEMORY_USE_RW
),
2984 TYPE_MODE (integer_type_node
));
2985 in_check_memory_usage
= 0;
2989 #endif /* PUSH_ROUNDING */
2991 /* Otherwise make space on the stack and copy the data
2992 to the address of that space. */
2994 /* Deduct words put into registers from the size we must copy. */
2997 if (GET_CODE (size
) == CONST_INT
)
2998 size
= GEN_INT (INTVAL (size
) - used
);
3000 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3001 GEN_INT (used
), NULL_RTX
, 0,
3005 /* Get the address of the stack space.
3006 In this case, we do not deal with EXTRA separately.
3007 A single stack adjust will do. */
3010 temp
= push_block (size
, extra
, where_pad
== downward
);
3013 else if (GET_CODE (args_so_far
) == CONST_INT
)
3014 temp
= memory_address (BLKmode
,
3015 plus_constant (args_addr
,
3016 skip
+ INTVAL (args_so_far
)));
3018 temp
= memory_address (BLKmode
,
3019 plus_constant (gen_rtx_PLUS (Pmode
,
3023 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3027 in_check_memory_usage
= 1;
3028 target
= copy_to_reg (temp
);
3029 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3030 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3032 XEXP (xinner
, 0), Pmode
,
3033 size
, TYPE_MODE (sizetype
));
3035 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3037 size
, TYPE_MODE (sizetype
),
3038 GEN_INT (MEMORY_USE_RW
),
3039 TYPE_MODE (integer_type_node
));
3040 in_check_memory_usage
= 0;
3043 /* TEMP is the address of the block. Copy the data there. */
3044 if (GET_CODE (size
) == CONST_INT
3045 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
)))
3047 move_by_pieces (gen_rtx_MEM (BLKmode
, temp
), xinner
,
3048 INTVAL (size
), align
);
3053 rtx opalign
= GEN_INT (align
);
3054 enum machine_mode mode
;
3055 rtx target
= gen_rtx_MEM (BLKmode
, temp
);
3057 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3059 mode
= GET_MODE_WIDER_MODE (mode
))
3061 enum insn_code code
= movstr_optab
[(int) mode
];
3062 insn_operand_predicate_fn pred
;
3064 if (code
!= CODE_FOR_nothing
3065 && ((GET_CODE (size
) == CONST_INT
3066 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3067 <= (GET_MODE_MASK (mode
) >> 1)))
3068 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3069 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3070 || ((*pred
) (target
, BLKmode
)))
3071 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3072 || ((*pred
) (xinner
, BLKmode
)))
3073 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3074 || ((*pred
) (opalign
, VOIDmode
))))
3076 rtx op2
= convert_to_mode (mode
, size
, 1);
3077 rtx last
= get_last_insn ();
3080 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3081 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3082 op2
= copy_to_mode_reg (mode
, op2
);
3084 pat
= GEN_FCN ((int) code
) (target
, xinner
,
3092 delete_insns_since (last
);
3097 #ifndef ACCUMULATE_OUTGOING_ARGS
3098 /* If the source is referenced relative to the stack pointer,
3099 copy it to another register to stabilize it. We do not need
3100 to do this if we know that we won't be changing sp. */
3102 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3103 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3104 temp
= copy_to_reg (temp
);
3107 /* Make inhibit_defer_pop nonzero around the library call
3108 to force it to pop the bcopy-arguments right away. */
3110 #ifdef TARGET_MEM_FUNCTIONS
3111 emit_library_call (memcpy_libfunc
, 0,
3112 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3113 convert_to_mode (TYPE_MODE (sizetype
),
3114 size
, TREE_UNSIGNED (sizetype
)),
3115 TYPE_MODE (sizetype
));
3117 emit_library_call (bcopy_libfunc
, 0,
3118 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3119 convert_to_mode (TYPE_MODE (integer_type_node
),
3121 TREE_UNSIGNED (integer_type_node
)),
3122 TYPE_MODE (integer_type_node
));
3127 else if (partial
> 0)
3129 /* Scalar partly in registers. */
3131 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3134 /* # words of start of argument
3135 that we must make space for but need not store. */
3136 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3137 int args_offset
= INTVAL (args_so_far
);
3140 /* Push padding now if padding above and stack grows down,
3141 or if padding below and stack grows up.
3142 But if space already allocated, this has already been done. */
3143 if (extra
&& args_addr
== 0
3144 && where_pad
!= none
&& where_pad
!= stack_direction
)
3145 anti_adjust_stack (GEN_INT (extra
));
3147 /* If we make space by pushing it, we might as well push
3148 the real data. Otherwise, we can leave OFFSET nonzero
3149 and leave the space uninitialized. */
3153 /* Now NOT_STACK gets the number of words that we don't need to
3154 allocate on the stack. */
3155 not_stack
= partial
- offset
;
3157 /* If the partial register-part of the arg counts in its stack size,
3158 skip the part of stack space corresponding to the registers.
3159 Otherwise, start copying to the beginning of the stack space,
3160 by setting SKIP to 0. */
3161 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3163 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3164 x
= validize_mem (force_const_mem (mode
, x
));
3166 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3167 SUBREGs of such registers are not allowed. */
3168 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3169 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3170 x
= copy_to_reg (x
);
3172 /* Loop over all the words allocated on the stack for this arg. */
3173 /* We can do it by words, because any scalar bigger than a word
3174 has a size a multiple of a word. */
3175 #ifndef PUSH_ARGS_REVERSED
3176 for (i
= not_stack
; i
< size
; i
++)
3178 for (i
= size
- 1; i
>= not_stack
; i
--)
3180 if (i
>= not_stack
+ offset
)
3181 emit_push_insn (operand_subword_force (x
, i
, mode
),
3182 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3184 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3186 reg_parm_stack_space
, alignment_pad
);
3191 rtx target
= NULL_RTX
;
3193 /* Push padding now if padding above and stack grows down,
3194 or if padding below and stack grows up.
3195 But if space already allocated, this has already been done. */
3196 if (extra
&& args_addr
== 0
3197 && where_pad
!= none
&& where_pad
!= stack_direction
)
3198 anti_adjust_stack (GEN_INT (extra
));
3200 #ifdef PUSH_ROUNDING
3202 addr
= gen_push_operand ();
3206 if (GET_CODE (args_so_far
) == CONST_INT
)
3208 = memory_address (mode
,
3209 plus_constant (args_addr
,
3210 INTVAL (args_so_far
)));
3212 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3217 emit_move_insn (gen_rtx_MEM (mode
, addr
), x
);
3219 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3221 in_check_memory_usage
= 1;
3223 target
= get_push_address (GET_MODE_SIZE (mode
));
3225 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3226 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3229 GEN_INT (GET_MODE_SIZE (mode
)),
3230 TYPE_MODE (sizetype
));
3232 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3234 GEN_INT (GET_MODE_SIZE (mode
)),
3235 TYPE_MODE (sizetype
),
3236 GEN_INT (MEMORY_USE_RW
),
3237 TYPE_MODE (integer_type_node
));
3238 in_check_memory_usage
= 0;
3243 /* If part should go in registers, copy that part
3244 into the appropriate registers. Do this now, at the end,
3245 since mem-to-mem copies above may do function calls. */
3246 if (partial
> 0 && reg
!= 0)
3248 /* Handle calls that pass values in multiple non-contiguous locations.
3249 The Irix 6 ABI has examples of this. */
3250 if (GET_CODE (reg
) == PARALLEL
)
3251 emit_group_load (reg
, x
, -1, align
); /* ??? size? */
3253 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3256 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3257 anti_adjust_stack (GEN_INT (extra
));
3260 anti_adjust_stack (alignment_pad
);
3263 /* Expand an assignment that stores the value of FROM into TO.
3264 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3265 (This may contain a QUEUED rtx;
3266 if the value is constant, this rtx is a constant.)
3267 Otherwise, the returned value is NULL_RTX.
3269 SUGGEST_REG is no longer actually used.
3270 It used to mean, copy the value through a register
3271 and return that register, if that is possible.
3272 We now use WANT_VALUE to decide whether to do this. */
3275 expand_assignment (to
, from
, want_value
, suggest_reg
)
3278 int suggest_reg ATTRIBUTE_UNUSED
;
3280 register rtx to_rtx
= 0;
3283 /* Don't crash if the lhs of the assignment was erroneous. */
3285 if (TREE_CODE (to
) == ERROR_MARK
)
3287 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3288 return want_value
? result
: NULL_RTX
;
3291 /* Assignment of a structure component needs special treatment
3292 if the structure component's rtx is not simply a MEM.
3293 Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
3297 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3298 || TREE_CODE (to
) == ARRAY_REF
)
3300 enum machine_mode mode1
;
3310 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3311 &unsignedp
, &volatilep
, &alignment
);
3313 /* If we are going to use store_bit_field and extract_bit_field,
3314 make sure to_rtx will be safe for multiple use. */
3316 if (mode1
== VOIDmode
&& want_value
)
3317 tem
= stabilize_reference (tem
);
3319 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3322 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3324 if (GET_CODE (to_rtx
) != MEM
)
3327 if (GET_MODE (offset_rtx
) != ptr_mode
)
3329 #ifdef POINTERS_EXTEND_UNSIGNED
3330 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
3332 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3336 /* A constant address in TO_RTX can have VOIDmode, we must not try
3337 to call force_reg for that case. Avoid that case. */
3338 if (GET_CODE (to_rtx
) == MEM
3339 && GET_MODE (to_rtx
) == BLKmode
3340 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3342 && (bitpos
% bitsize
) == 0
3343 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3344 && (alignment
* BITS_PER_UNIT
) == GET_MODE_ALIGNMENT (mode1
))
3346 rtx temp
= change_address (to_rtx
, mode1
,
3347 plus_constant (XEXP (to_rtx
, 0),
3350 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3353 to_rtx
= change_address (to_rtx
, mode1
,
3354 force_reg (GET_MODE (XEXP (temp
, 0)),
3359 to_rtx
= change_address (to_rtx
, VOIDmode
,
3360 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
3361 force_reg (ptr_mode
,
3367 if (GET_CODE (to_rtx
) == MEM
)
3369 /* When the offset is zero, to_rtx is the address of the
3370 structure we are storing into, and hence may be shared.
3371 We must make a new MEM before setting the volatile bit. */
3373 to_rtx
= copy_rtx (to_rtx
);
3375 MEM_VOLATILE_P (to_rtx
) = 1;
3377 #if 0 /* This was turned off because, when a field is volatile
3378 in an object which is not volatile, the object may be in a register,
3379 and then we would abort over here. */
3385 if (TREE_CODE (to
) == COMPONENT_REF
3386 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3389 to_rtx
= copy_rtx (to_rtx
);
3391 RTX_UNCHANGING_P (to_rtx
) = 1;
3394 /* Check the access. */
3395 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3400 enum machine_mode best_mode
;
3402 best_mode
= get_best_mode (bitsize
, bitpos
,
3403 TYPE_ALIGN (TREE_TYPE (tem
)),
3405 if (best_mode
== VOIDmode
)
3408 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3409 to_addr
= plus_constant (XEXP (to_rtx
, 0), (bitpos
/ BITS_PER_UNIT
));
3410 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3411 size
*= GET_MODE_SIZE (best_mode
);
3413 /* Check the access right of the pointer. */
3415 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3417 GEN_INT (size
), TYPE_MODE (sizetype
),
3418 GEN_INT (MEMORY_USE_WO
),
3419 TYPE_MODE (integer_type_node
));
3422 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3424 /* Spurious cast makes HPUX compiler happy. */
3425 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
3428 /* Required alignment of containing datum. */
3430 int_size_in_bytes (TREE_TYPE (tem
)),
3431 get_alias_set (to
));
3432 preserve_temp_slots (result
);
3436 /* If the value is meaningful, convert RESULT to the proper mode.
3437 Otherwise, return nothing. */
3438 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3439 TYPE_MODE (TREE_TYPE (from
)),
3441 TREE_UNSIGNED (TREE_TYPE (to
)))
3445 /* If the rhs is a function call and its value is not an aggregate,
3446 call the function before we start to compute the lhs.
3447 This is needed for correct code for cases such as
3448 val = setjmp (buf) on machines where reference to val
3449 requires loading up part of an address in a separate insn.
3451 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3452 a promoted variable where the zero- or sign- extension needs to be done.
3453 Handling this in the normal way is safe because no computation is done
3455 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3456 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3457 && ! (TREE_CODE (to
) == VAR_DECL
&& GET_CODE (DECL_RTL (to
)) == REG
))
3462 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3464 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3466 /* Handle calls that return values in multiple non-contiguous locations.
3467 The Irix 6 ABI has examples of this. */
3468 if (GET_CODE (to_rtx
) == PARALLEL
)
3469 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)),
3470 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
3471 else if (GET_MODE (to_rtx
) == BLKmode
)
3472 emit_block_move (to_rtx
, value
, expr_size (from
),
3473 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
3476 #ifdef POINTERS_EXTEND_UNSIGNED
3477 if (TREE_CODE (TREE_TYPE (to
)) == REFERENCE_TYPE
3478 || TREE_CODE (TREE_TYPE (to
)) == POINTER_TYPE
)
3479 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3481 emit_move_insn (to_rtx
, value
);
3483 preserve_temp_slots (to_rtx
);
3486 return want_value
? to_rtx
: NULL_RTX
;
3489 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3490 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3494 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3495 if (GET_CODE (to_rtx
) == MEM
)
3496 MEM_ALIAS_SET (to_rtx
) = get_alias_set (to
);
3499 /* Don't move directly into a return register. */
3500 if (TREE_CODE (to
) == RESULT_DECL
3501 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3506 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3508 if (GET_CODE (to_rtx
) == PARALLEL
)
3509 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)),
3510 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
3512 emit_move_insn (to_rtx
, temp
);
3514 preserve_temp_slots (to_rtx
);
3517 return want_value
? to_rtx
: NULL_RTX
;
3520 /* In case we are returning the contents of an object which overlaps
3521 the place the value is being stored, use a safe function when copying
3522 a value through a pointer into a structure value return block. */
3523 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3524 && current_function_returns_struct
3525 && !current_function_returns_pcc_struct
)
3530 size
= expr_size (from
);
3531 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3532 EXPAND_MEMORY_USE_DONT
);
3534 /* Copy the rights of the bitmap. */
3535 if (current_function_check_memory_usage
)
3536 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3537 XEXP (to_rtx
, 0), Pmode
,
3538 XEXP (from_rtx
, 0), Pmode
,
3539 convert_to_mode (TYPE_MODE (sizetype
),
3540 size
, TREE_UNSIGNED (sizetype
)),
3541 TYPE_MODE (sizetype
));
3543 #ifdef TARGET_MEM_FUNCTIONS
3544 emit_library_call (memcpy_libfunc
, 0,
3545 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3546 XEXP (from_rtx
, 0), Pmode
,
3547 convert_to_mode (TYPE_MODE (sizetype
),
3548 size
, TREE_UNSIGNED (sizetype
)),
3549 TYPE_MODE (sizetype
));
3551 emit_library_call (bcopy_libfunc
, 0,
3552 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3553 XEXP (to_rtx
, 0), Pmode
,
3554 convert_to_mode (TYPE_MODE (integer_type_node
),
3555 size
, TREE_UNSIGNED (integer_type_node
)),
3556 TYPE_MODE (integer_type_node
));
3559 preserve_temp_slots (to_rtx
);
3562 return want_value
? to_rtx
: NULL_RTX
;
3565 /* Compute FROM and store the value in the rtx we got. */
3568 result
= store_expr (from
, to_rtx
, want_value
);
3569 preserve_temp_slots (result
);
3572 return want_value
? result
: NULL_RTX
;
3575 /* Generate code for computing expression EXP,
3576 and storing the value into TARGET.
3577 TARGET may contain a QUEUED rtx.
3579 If WANT_VALUE is nonzero, return a copy of the value
3580 not in TARGET, so that we can be sure to use the proper
3581 value in a containing expression even if TARGET has something
3582 else stored in it. If possible, we copy the value through a pseudo
3583 and return that pseudo. Or, if the value is constant, we try to
3584 return the constant. In some cases, we return a pseudo
3585 copied *from* TARGET.
3587 If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
3590 assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more careful?
3594 If WANT_VALUE is 0, we return NULL, to make sure
3595 to catch quickly any cases where the caller uses the value
3596 and fails to set WANT_VALUE. */
3599 store_expr (exp
, target
, want_value
)
3601 register rtx target
;
3605 int dont_return_target
= 0;
3607 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3609 /* Perform first part of compound expression, then assign from second
3611 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3613 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3615 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3617 /* For conditional expression, get safe form of the target. Then
3618 test the condition, doing the appropriate assignment on either
3619 side. This avoids the creation of unnecessary temporaries.
3620 For non-BLKmode, it is more efficient not to do this. */
3622 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3625 target
= protect_from_queue (target
, 1);
3627 do_pending_stack_adjust ();
3629 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3630 start_cleanup_deferral ();
3631 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3632 end_cleanup_deferral ();
3634 emit_jump_insn (gen_jump (lab2
));
3637 start_cleanup_deferral ();
3638 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3639 end_cleanup_deferral ();
3644 return want_value
? target
: NULL_RTX
;
3646 else if (queued_subexp_p (target
))
3647 /* If target contains a postincrement, let's not risk
3648 using it as the place to generate the rhs. */
3650 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3652 /* Expand EXP into a new pseudo. */
3653 temp
= gen_reg_rtx (GET_MODE (target
));
3654 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3657 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3659 /* If target is volatile, ANSI requires accessing the value
3660 *from* the target, if it is accessed. So make that happen.
3661 In no case return the target itself. */
3662 if (! MEM_VOLATILE_P (target
) && want_value
)
3663 dont_return_target
= 1;
3665 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3666 && GET_MODE (target
) != BLKmode
)
3667 /* If target is in memory and caller wants value in a register instead,
3668 arrange that. Pass TARGET as target for expand_expr so that,
3669 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3670 We know expand_expr will not use the target in that case.
3671 Don't do this if TARGET is volatile because we are supposed
3672 to write it and then read it. */
3674 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3675 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3676 temp
= copy_to_reg (temp
);
3677 dont_return_target
= 1;
3679 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
    /* If this is a scalar in a register that is stored in a wider mode
3681 than the declared mode, compute the result into its declared mode
3682 and then convert to the wider mode. Our value is the computed
3685 /* If we don't want a value, we can do the conversion inside EXP,
3686 which will often result in some optimizations. Do the conversion
3687 in two steps: first change the signedness, if needed, then
3688 the extend. But don't do this if the type of EXP is a subtype
3689 of something else since then the conversion might involve
3690 more than just converting modes. */
3691 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3692 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3694 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3695 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3698 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3702 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3703 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3707 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3709 /* If TEMP is a volatile MEM and we want a result value, make
3710 the access now so it gets done only once. Likewise if
3711 it contains TARGET. */
3712 if (GET_CODE (temp
) == MEM
&& want_value
3713 && (MEM_VOLATILE_P (temp
)
3714 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3715 temp
= copy_to_reg (temp
);
3717 /* If TEMP is a VOIDmode constant, use convert_modes to make
3718 sure that we properly convert it. */
3719 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3720 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3721 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3722 SUBREG_PROMOTED_UNSIGNED_P (target
));
3724 convert_move (SUBREG_REG (target
), temp
,
3725 SUBREG_PROMOTED_UNSIGNED_P (target
));
3727 /* If we promoted a constant, change the mode back down to match
3728 target. Otherwise, the caller might get confused by a result whose
3729 mode is larger than expected. */
3731 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
3732 && GET_MODE (temp
) != VOIDmode
)
3734 temp
= gen_rtx_SUBREG (GET_MODE (target
), temp
, 0);
3735 SUBREG_PROMOTED_VAR_P (temp
) = 1;
3736 SUBREG_PROMOTED_UNSIGNED_P (temp
)
3737 = SUBREG_PROMOTED_UNSIGNED_P (target
);
3740 return want_value
? temp
: NULL_RTX
;
3744 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3745 /* Return TARGET if it's a specified hardware register.
3746 If TARGET is a volatile mem ref, either return TARGET
3747 or return a reg copied *from* TARGET; ANSI requires this.
3749 Otherwise, if TEMP is not TARGET, return TEMP
3750 if it is constant (for efficiency),
3751 or if we really want the correct value. */
3752 if (!(target
&& GET_CODE (target
) == REG
3753 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3754 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3755 && ! rtx_equal_p (temp
, target
)
3756 && (CONSTANT_P (temp
) || want_value
))
3757 dont_return_target
= 1;
3760 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3761 the same as that of TARGET, adjust the constant. This is needed, for
3762 example, in case it is a CONST_DOUBLE and we want only a word-sized
3764 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3765 && TREE_CODE (exp
) != ERROR_MARK
3766 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3767 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3768 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3770 if (current_function_check_memory_usage
3771 && GET_CODE (target
) == MEM
3772 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
3774 if (GET_CODE (temp
) == MEM
)
3775 emit_library_call (chkr_copy_bitmap_libfunc
, 1, VOIDmode
, 3,
3776 XEXP (target
, 0), Pmode
,
3777 XEXP (temp
, 0), Pmode
,
3778 expr_size (exp
), TYPE_MODE (sizetype
));
3780 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3781 XEXP (target
, 0), Pmode
,
3782 expr_size (exp
), TYPE_MODE (sizetype
),
3783 GEN_INT (MEMORY_USE_WO
),
3784 TYPE_MODE (integer_type_node
));
3787 /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
3789 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3790 one or both of them are volatile memory refs, we have to distinguish
3792 - expand_expr has used TARGET. In this case, we must not generate
3793 another copy. This can be detected by TARGET being equal according
3795 - expand_expr has not used TARGET - that means that the source just
3796 happens to have the same RTX form. Since temp will have been created
3797 by expand_expr, it will compare unequal according to == .
3798 We must generate a copy in this case, to reach the correct number
3799 of volatile memory references. */
3801 if ((! rtx_equal_p (temp
, target
)
3802 || (temp
!= target
&& (side_effects_p (temp
)
3803 || side_effects_p (target
))))
3804 && TREE_CODE (exp
) != ERROR_MARK
)
3806 target
= protect_from_queue (target
, 1);
3807 if (GET_MODE (temp
) != GET_MODE (target
)
3808 && GET_MODE (temp
) != VOIDmode
)
3810 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3811 if (dont_return_target
)
3813 /* In this case, we will return TEMP,
3814 so make sure it has the proper mode.
3815 But don't forget to store the value into TARGET. */
3816 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3817 emit_move_insn (target
, temp
);
3820 convert_move (target
, temp
, unsignedp
);
3823 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3825 /* Handle copying a string constant into an array.
3826 The string constant may be shorter than the array.
3827 So copy just the string's actual length, and clear the rest. */
3831 /* Get the size of the data type of the string,
3832 which is actually the size of the target. */
3833 size
= expr_size (exp
);
3834 if (GET_CODE (size
) == CONST_INT
3835 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3836 emit_block_move (target
, temp
, size
,
3837 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3840 /* Compute the size of the data to copy from the string. */
3842 = size_binop (MIN_EXPR
,
3843 make_tree (sizetype
, size
),
3845 build_int_2 (TREE_STRING_LENGTH (exp
), 0)));
3846 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3850 /* Copy that much. */
3851 emit_block_move (target
, temp
, copy_size_rtx
,
3852 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3854 /* Figure out how much is left in TARGET that we have to clear.
3855 Do all calculations in ptr_mode. */
3857 addr
= XEXP (target
, 0);
3858 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
3860 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3862 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
3863 size
= plus_constant (size
, - TREE_STRING_LENGTH (exp
));
3867 addr
= force_reg (ptr_mode
, addr
);
3868 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
3869 copy_size_rtx
, NULL_RTX
, 0,
3872 size
= expand_binop (ptr_mode
, sub_optab
, size
,
3873 copy_size_rtx
, NULL_RTX
, 0,
3876 label
= gen_label_rtx ();
3877 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
3878 GET_MODE (size
), 0, 0, label
);
3881 if (size
!= const0_rtx
)
3883 /* Be sure we can write on ADDR. */
3884 if (current_function_check_memory_usage
)
3885 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
3887 size
, TYPE_MODE (sizetype
),
3888 GEN_INT (MEMORY_USE_WO
),
3889 TYPE_MODE (integer_type_node
));
3890 #ifdef TARGET_MEM_FUNCTIONS
3891 emit_library_call (memset_libfunc
, 0, VOIDmode
, 3,
3893 const0_rtx
, TYPE_MODE (integer_type_node
),
3894 convert_to_mode (TYPE_MODE (sizetype
),
3896 TREE_UNSIGNED (sizetype
)),
3897 TYPE_MODE (sizetype
));
3899 emit_library_call (bzero_libfunc
, 0, VOIDmode
, 2,
3901 convert_to_mode (TYPE_MODE (integer_type_node
),
3903 TREE_UNSIGNED (integer_type_node
)),
3904 TYPE_MODE (integer_type_node
));
3912 /* Handle calls that return values in multiple non-contiguous locations.
3913 The Irix 6 ABI has examples of this. */
3914 else if (GET_CODE (target
) == PARALLEL
)
3915 emit_group_load (target
, temp
, int_size_in_bytes (TREE_TYPE (exp
)),
3916 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3917 else if (GET_MODE (temp
) == BLKmode
)
3918 emit_block_move (target
, temp
, expr_size (exp
),
3919 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3921 emit_move_insn (target
, temp
);
3924 /* If we don't want a value, return NULL_RTX. */
3928 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3929 ??? The latter test doesn't seem to make sense. */
3930 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
3933 /* Return TARGET itself if it is a hard register. */
3934 else if (want_value
&& GET_MODE (target
) != BLKmode
3935 && ! (GET_CODE (target
) == REG
3936 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3937 return copy_to_reg (target
);
3943 /* Return 1 if EXP just contains zeros. */
3951 switch (TREE_CODE (exp
))
3955 case NON_LVALUE_EXPR
:
3956 return is_zeros_p (TREE_OPERAND (exp
, 0));
3959 return TREE_INT_CST_LOW (exp
) == 0 && TREE_INT_CST_HIGH (exp
) == 0;
3963 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
3966 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
3969 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3970 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
3971 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3972 if (! is_zeros_p (TREE_VALUE (elt
)))
3982 /* Return 1 if EXP contains mostly (3/4) zeros. */
3985 mostly_zeros_p (exp
)
3988 if (TREE_CODE (exp
) == CONSTRUCTOR
)
3990 int elts
= 0, zeros
= 0;
3991 tree elt
= CONSTRUCTOR_ELTS (exp
);
3992 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3994 /* If there are no ranges of true bits, it is all zero. */
3995 return elt
== NULL_TREE
;
3997 for (; elt
; elt
= TREE_CHAIN (elt
))
3999 /* We do not handle the case where the index is a RANGE_EXPR,
4000 so the statistic will be somewhat inaccurate.
4001 We do make a more accurate count in store_constructor itself,
4002 so since this function is only used for nested array elements,
4003 this should be close enough. */
4004 if (mostly_zeros_p (TREE_VALUE (elt
)))
4009 return 4 * zeros
>= 3 * elts
;
4012 return is_zeros_p (exp
);
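/* As an illustration of the 4 * zeros >= 3 * elts test above: a
   constructor with 16 elements counts as mostly zero when at least 12 of
   its elements are themselves mostly zero.  */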
4015 /* Helper function for store_constructor.
4016 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4017 TYPE is the type of the CONSTRUCTOR, not the element type.
4018 ALIGN and CLEARED are as for store_constructor.
4020 This provides a recursive shortcut back to store_constructor when it isn't
4021 necessary to go through store_field. This is so that we can pass through
4022 the cleared field to let store_constructor know that we may not have to
4023 clear a substructure if the outer structure has already been cleared. */
4026 store_constructor_field (target
, bitsize
, bitpos
,
4027 mode
, exp
, type
, align
, cleared
)
4029 int bitsize
, bitpos
;
4030 enum machine_mode mode
;
4035 if (TREE_CODE (exp
) == CONSTRUCTOR
4036 && bitpos
% BITS_PER_UNIT
== 0
4037 /* If we have a non-zero bitpos for a register target, then we just
4038 let store_field do the bitfield handling. This is unlikely to
4039 generate unnecessary clear instructions anyways. */
4040 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4044 = change_address (target
,
4045 GET_MODE (target
) == BLKmode
4047 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4048 ? BLKmode
: VOIDmode
,
4049 plus_constant (XEXP (target
, 0),
4050 bitpos
/ BITS_PER_UNIT
));
4051 store_constructor (exp
, target
, align
, cleared
);
4054 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4055 (align
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
,
4056 int_size_in_bytes (type
), cleared
);
4059 /* Store the value of constructor EXP into the rtx TARGET.
4060 TARGET is either a REG or a MEM.
4061 ALIGN is the maximum known alignment for TARGET, in bits.
4062 CLEARED is true if TARGET is known to have been zero'd. */
4065 store_constructor (exp
, target
, align
, cleared
)
4071 tree type
= TREE_TYPE (exp
);
4072 #ifdef WORD_REGISTER_OPERATIONS
4073 rtx exp_size
= expr_size (exp
);
4076 /* We know our target cannot conflict, since safe_from_p has been called. */
4078 /* Don't try copying piece by piece into a hard register
4079 since that is vulnerable to being clobbered by EXP.
4080 Instead, construct in a pseudo register and then copy it all. */
4081 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4083 rtx temp
= gen_reg_rtx (GET_MODE (target
));
4084 store_constructor (exp
, temp
, 0);
4085 emit_move_insn (target
, temp
);
4090 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4091 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4095 /* Inform later passes that the whole union value is dead. */
4096 if ((TREE_CODE (type
) == UNION_TYPE
4097 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4100 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4102 /* If the constructor is empty, clear the union. */
4103 if (! CONSTRUCTOR_ELTS (exp
) && ! cleared
)
4104 clear_storage (target
, expr_size (exp
),
4105 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
4108 /* If we are building a static constructor into a register,
4109 set the initial value as zero so we can fold the value into
4110 a constant. But if more than one register is involved,
4111 this probably loses. */
4112 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4113 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4116 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4121 /* If the constructor has fewer fields than the structure
4122 or if we are initializing the structure to mostly zeros,
4123 clear the whole structure first. */
4124 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4125 != list_length (TYPE_FIELDS (type)))
4126 || mostly_zeros_p (exp))
4129 clear_storage (target, expr_size (exp),
4130 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4135 /* Inform later passes that the old value is dead. */
4136 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4138 /* Store each element of the constructor into
4139 the corresponding field of TARGET. */
4141 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4143 register tree field
= TREE_PURPOSE (elt
);
4144 #ifdef WORD_REGISTER_OPERATIONS
4145 tree value
= TREE_VALUE (elt
);
4147 register enum machine_mode mode
;
4151 tree pos
, constant
= 0, offset
= 0;
4152 rtx to_rtx
= target
;
4154 /* Just ignore missing fields.
4155 We cleared the whole structure, above,
4156 if any fields are missing. */
4160 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4163 if (TREE_CODE (DECL_SIZE (field
)) == INTEGER_CST
)
4164 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
4168 unsignedp
= TREE_UNSIGNED (field
);
4169 mode
= DECL_MODE (field
);
4170 if (DECL_BIT_FIELD (field
))
4173 pos
= DECL_FIELD_BITPOS (field
);
4174 if (TREE_CODE (pos
) == INTEGER_CST
)
4176 else if (TREE_CODE (pos
) == PLUS_EXPR
4177 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4178 constant
= TREE_OPERAND (pos
, 1), offset
= TREE_OPERAND (pos
, 0);
4183 bitpos
= TREE_INT_CST_LOW (constant
);
4189 if (contains_placeholder_p (offset
))
4190 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4191 offset
, make_tree (TREE_TYPE (exp
), target
));
4193 offset
= size_binop (EXACT_DIV_EXPR
, offset
,
4194 size_int (BITS_PER_UNIT
));
4196 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4197 if (GET_CODE (to_rtx
) != MEM
)
4200 if (GET_MODE (offset_rtx
) != ptr_mode
)
4202 #ifdef POINTERS_EXTEND_UNSIGNED
4203 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4205 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4210 = change_address (to_rtx
, VOIDmode
,
4211 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4212 force_reg (ptr_mode
,
4216 if (TREE_READONLY (field
))
4218 if (GET_CODE (to_rtx
) == MEM
)
4219 to_rtx
= copy_rtx (to_rtx
);
4221 RTX_UNCHANGING_P (to_rtx
) = 1;
4224 #ifdef WORD_REGISTER_OPERATIONS
4225 /* If this initializes a field that is smaller than a word, at the
4226 start of a word, try to widen it to a full word.
4227 This special case allows us to output C++ member function
4228 initializations in a form that the optimizers can understand. */
4230 && GET_CODE (target
) == REG
4231 && bitsize
< BITS_PER_WORD
4232 && bitpos
% BITS_PER_WORD
== 0
4233 && GET_MODE_CLASS (mode
) == MODE_INT
4234 && TREE_CODE (value
) == INTEGER_CST
4235 && GET_CODE (exp_size
) == CONST_INT
4236 && bitpos
+ BITS_PER_WORD
<= INTVAL (exp_size
) * BITS_PER_UNIT
)
4238 tree type
= TREE_TYPE (value
);
4239 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4241 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4242 value
= convert (type
, value
);
4244 if (BYTES_BIG_ENDIAN
)
4246 = fold (build (LSHIFT_EXPR
, type
, value
,
4247 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4248 bitsize
= BITS_PER_WORD
;
4252 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4253 TREE_VALUE (elt
), type
,
4255 DECL_ALIGN (TREE_PURPOSE (elt
))),
4259 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4264 tree domain
= TYPE_DOMAIN (type
);
4265 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
4266 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
4267 tree elttype
= TREE_TYPE (type
);
4269 /* If the constructor has fewer elements than the array,
4270 clear the whole array first. Similarly if this is
4271 a static constructor of a non-BLKmode object. */
4272 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4276 HOST_WIDE_INT count
= 0, zero_count
= 0;
4278 /* This loop is a more accurate version of the loop in
4279 mostly_zeros_p (it handles RANGE_EXPR in an index).
4280 It is also needed to check for missing elements. */
4281 for (elt
= CONSTRUCTOR_ELTS (exp
);
4283 elt
= TREE_CHAIN (elt
))
4285 tree index
= TREE_PURPOSE (elt
);
4286 HOST_WIDE_INT this_node_count
;
4287 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4289 tree lo_index
= TREE_OPERAND (index
, 0);
4290 tree hi_index
= TREE_OPERAND (index
, 1);
4291 if (TREE_CODE (lo_index
) != INTEGER_CST
4292 || TREE_CODE (hi_index
) != INTEGER_CST
)
4297 this_node_count
= TREE_INT_CST_LOW (hi_index
)
4298 - TREE_INT_CST_LOW (lo_index
) + 1;
4301 this_node_count
= 1;
4302 count
+= this_node_count
;
4303 if (mostly_zeros_p (TREE_VALUE (elt
)))
4304 zero_count
+= this_node_count
;
4306 /* Clear the entire array first if there are any missing elements,
4307 or if the incidence of zero elements is >= 75%. */
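/* Worked example (illustrative, not from the original source): for an
   initializer covering COUNT = 8 elements of which ZERO_COUNT = 6 are
   zero, 4 * 6 = 24 >= 3 * 8 = 24, so the test below succeeds and the
   whole array is cleared first; only the two non-zero elements are then
   stored individually.  */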
4308 if (count
< maxelt
- minelt
+ 1
4309 || 4 * zero_count
>= 3 * count
)
4315 clear_storage (target
, expr_size (exp
),
4316 (align
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
4320 /* Inform later passes that the old value is dead. */
4321 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4323 /* Store each element of the constructor into
4324 the corresponding element of TARGET, determined
4325 by counting the elements. */
4326 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4328 elt
= TREE_CHAIN (elt
), i
++)
4330 register enum machine_mode mode
;
4334 tree value
= TREE_VALUE (elt
);
4335 int align
= TYPE_ALIGN (TREE_TYPE (value
));
4336 tree index
= TREE_PURPOSE (elt
);
4337 rtx xtarget
= target
;
4339 if (cleared
&& is_zeros_p (value
))
4342 unsignedp
= TREE_UNSIGNED (elttype
);
4343 mode
= TYPE_MODE (elttype
);
4344 if (mode
== BLKmode
)
4346 if (TREE_CODE (TYPE_SIZE (elttype
)) == INTEGER_CST
4347 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype
)) == 0)
4348 bitsize
= TREE_INT_CST_LOW (TYPE_SIZE (elttype
));
4353 bitsize
= GET_MODE_BITSIZE (mode
);
4355 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4357 tree lo_index
= TREE_OPERAND (index
, 0);
4358 tree hi_index
= TREE_OPERAND (index
, 1);
4359 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
4360 struct nesting
*loop
;
4361 HOST_WIDE_INT lo
, hi
, count
;
4364 /* If the range is constant and "small", unroll the loop. */
4365 if (TREE_CODE (lo_index
) == INTEGER_CST
4366 && TREE_CODE (hi_index
) == INTEGER_CST
4367 && (lo
= TREE_INT_CST_LOW (lo_index
),
4368 hi
= TREE_INT_CST_LOW (hi_index
),
4369 count
= hi
- lo
+ 1,
4370 (GET_CODE (target
) != MEM
4372 || (TREE_CODE (TYPE_SIZE (elttype
)) == INTEGER_CST
4373 && TREE_INT_CST_LOW (TYPE_SIZE (elttype
)) * count
4376 lo
-= minelt
; hi
-= minelt
;
4377 for (; lo
<= hi
; lo
++)
4379 bitpos
= lo
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
));
4380 store_constructor_field (target
, bitsize
, bitpos
, mode
,
4381 value
, type
, align
, cleared
);
4386 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4387 loop_top
= gen_label_rtx ();
4388 loop_end
= gen_label_rtx ();
4390 unsignedp
= TREE_UNSIGNED (domain
);
4392 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4394 DECL_RTL (index
) = index_r
4395 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4398 if (TREE_CODE (value
) == SAVE_EXPR
4399 && SAVE_EXPR_RTL (value
) == 0)
4401 /* Make sure value gets expanded once before the
4403 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4406 store_expr (lo_index
, index_r
, 0);
4407 loop
= expand_start_loop (0);
4409 /* Assign value to element index. */
4410 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
4411 size_int (BITS_PER_UNIT
));
4412 position
= size_binop (MULT_EXPR
,
4413 size_binop (MINUS_EXPR
, index
,
4414 TYPE_MIN_VALUE (domain
)),
4416 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4417 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4418 xtarget
= change_address (target
, mode
, addr
);
4419 if (TREE_CODE (value
) == CONSTRUCTOR
)
4420 store_constructor (value
, xtarget
, align
, cleared
);
4422 store_expr (value
, xtarget
, 0);
4424 expand_exit_loop_if_false (loop
,
4425 build (LT_EXPR
, integer_type_node
,
4428 expand_increment (build (PREINCREMENT_EXPR
,
4430 index
, integer_one_node
), 0, 0);
4432 emit_label (loop_end
);
4434 /* Needed by stupid register allocation, to extend the
4435 lifetime of pseudo-regs used by target past the end
4437 emit_insn (gen_rtx_USE (GET_MODE (target
), target
));
4440 else if ((index
!= 0 && TREE_CODE (index
) != INTEGER_CST
)
4441 || TREE_CODE (TYPE_SIZE (elttype
)) != INTEGER_CST
)
4447 index
= size_int (i
);
4450 index
= size_binop (MINUS_EXPR
, index
,
4451 TYPE_MIN_VALUE (domain
));
4452 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
4453 size_int (BITS_PER_UNIT
));
4454 position
= size_binop (MULT_EXPR
, index
, position
);
4455 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4456 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4457 xtarget
= change_address (target
, mode
, addr
);
4458 store_expr (value
, xtarget
, 0);
4463 bitpos
= ((TREE_INT_CST_LOW (index
) - minelt
)
4464 * TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
4466 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
4467 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4468 type
, align
, cleared
);
4472 /* set constructor assignments */
4473 else if (TREE_CODE (type
) == SET_TYPE
)
4475 tree elt
= CONSTRUCTOR_ELTS (exp
);
4476 int nbytes
= int_size_in_bytes (type
), nbits
;
4477 tree domain
= TYPE_DOMAIN (type
);
4478 tree domain_min
, domain_max
, bitlength
;
4480 /* The default implementation strategy is to extract the constant
4481 parts of the constructor, use that to initialize the target,
4482 and then "or" in whatever non-constant ranges we need in addition.
4484 If a large set is all zero or all ones, it is
4485 probably better to set it using memset (if available) or bzero.
4486 Also, if a large set has just a single range, it may also be
4487 better to first clear the whole set (using
4488 bzero/memset), and then set the bits we want. */
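/* Illustrative sketch (not from the original source): a set constructor
   whose elements are all constant bits or constant ranges produces a
   fixed bit pattern; when that pattern fits in a word or two it is simply
   moved into the target with move insns below.  A constructor containing
   a non-constant range [lo .. hi] instead clears the storage first (or
   skips the clear when the set is known to be all ones) and then fills in
   the range at run time through memset or the __setbits library call.  */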
4490 /* Check for all zeros. */
4491 if (elt
== NULL_TREE
)
4494 clear_storage (target
, expr_size (exp
),
4495 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
4499 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4500 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4501 bitlength
= size_binop (PLUS_EXPR
,
4502 size_binop (MINUS_EXPR
, domain_max
, domain_min
),
4505 if (nbytes
< 0 || TREE_CODE (bitlength
) != INTEGER_CST
)
4507 nbits
= TREE_INT_CST_LOW (bitlength
);
4509 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4510 are "complicated" (more than one range), initialize (the
4511 constant parts) by copying from a constant. */
4512 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4513 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4515 int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4516 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4517 char *bit_buffer
= (char *) alloca (nbits
);
4518 HOST_WIDE_INT word
= 0;
4521 int offset
= 0; /* In bytes from beginning of set. */
4522 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4525 if (bit_buffer
[ibit
])
4527 if (BYTES_BIG_ENDIAN
)
4528 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4530 word
|= 1 << bit_pos
;
4533 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4535 if (word
!= 0 || ! cleared
)
4537 rtx datum
= GEN_INT (word
);
4539 /* The assumption here is that it is safe to use
4540 XEXP if the set is multi-word, but not if
4541 it's single-word. */
4542 if (GET_CODE (target
) == MEM
)
4544 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4545 to_rtx
= change_address (target
, mode
, to_rtx
);
4547 else if (offset
== 0)
4551 emit_move_insn (to_rtx
, datum
);
4557 offset
+= set_word_size
/ BITS_PER_UNIT
;
4563 /* Don't bother clearing storage if the set is all ones. */
4564 if (TREE_CHAIN (elt
) != NULL_TREE
4565 || (TREE_PURPOSE (elt
) == NULL_TREE
4567 : (TREE_CODE (TREE_VALUE (elt
)) != INTEGER_CST
4568 || TREE_CODE (TREE_PURPOSE (elt
)) != INTEGER_CST
4569 || (TREE_INT_CST_LOW (TREE_VALUE (elt
))
4570 - TREE_INT_CST_LOW (TREE_PURPOSE (elt
)) + 1
4572 clear_storage (target
, expr_size (exp
),
4573 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
4576 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4578 /* start of range of element or NULL */
4579 tree startbit
= TREE_PURPOSE (elt
);
4580 /* end of range of element, or element value */
4581 tree endbit
= TREE_VALUE (elt
);
4582 #ifdef TARGET_MEM_FUNCTIONS
4583 HOST_WIDE_INT startb
, endb
;
4585 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4587 bitlength_rtx
= expand_expr (bitlength
,
4588 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4590 /* handle non-range tuple element like [ expr ] */
4591 if (startbit
== NULL_TREE
)
4593 startbit
= save_expr (endbit
);
4596 startbit
= convert (sizetype
, startbit
);
4597 endbit
= convert (sizetype
, endbit
);
4598 if (! integer_zerop (domain_min
))
4600 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4601 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4603 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4604 EXPAND_CONST_ADDRESS
);
4605 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4606 EXPAND_CONST_ADDRESS
);
4610 targetx
= assign_stack_temp (GET_MODE (target
),
4611 GET_MODE_SIZE (GET_MODE (target
)),
4613 emit_move_insn (targetx
, target
);
4615 else if (GET_CODE (target
) == MEM
)
4620 #ifdef TARGET_MEM_FUNCTIONS
4621 /* Optimization: If startbit and endbit are
4622 constants divisible by BITS_PER_UNIT,
4623 call memset instead. */
4624 if (TREE_CODE (startbit
) == INTEGER_CST
4625 && TREE_CODE (endbit
) == INTEGER_CST
4626 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4627 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4629 emit_library_call (memset_libfunc
, 0,
4631 plus_constant (XEXP (targetx
, 0),
4632 startb
/ BITS_PER_UNIT
),
4634 constm1_rtx
, TYPE_MODE (integer_type_node
),
4635 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4636 TYPE_MODE (sizetype
));
4641 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4642 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
4643 bitlength_rtx
, TYPE_MODE (sizetype
),
4644 startbit_rtx
, TYPE_MODE (sizetype
),
4645 endbit_rtx
, TYPE_MODE (sizetype
));
4648 emit_move_insn (target
, targetx
);
4656 /* Store the value of EXP (an expression tree)
4657 into a subfield of TARGET which has mode MODE and occupies
4658 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4659 If MODE is VOIDmode, it means that we are storing into a bit-field.
4661 If VALUE_MODE is VOIDmode, return nothing in particular.
4662 UNSIGNEDP is not used in this case.
4664 Otherwise, return an rtx for the value stored. This rtx
4665 has mode VALUE_MODE if that is convenient to do.
4666 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4668 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4669 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4671 ALIAS_SET is the alias set for the destination. This value will
4672 (in general) be different from that for TARGET, since TARGET is a
4673 reference to the containing structure. */
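/* Illustrative example (not from the original source): for

       struct s { unsigned f : 3; unsigned g : 5; } x;
       x.g = v;

   store_field is called with TARGET = the rtx for X, BITSIZE = 5,
   BITPOS = 3 (on a typical little-endian bit layout), MODE = VOIDmode to
   request bit-field handling, and ALIAS_SET = the alias set of the G
   field rather than that of X itself.  */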
4676 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
4677 unsignedp
, align
, total_size
, alias_set
)
4679 int bitsize
, bitpos
;
4680 enum machine_mode mode
;
4682 enum machine_mode value_mode
;
4688 HOST_WIDE_INT width_mask
= 0;
4690 if (TREE_CODE (exp
) == ERROR_MARK
)
4693 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4694 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4696 /* If we are storing into an unaligned field of an aligned union that is
4697 in a register, we may have the mode of TARGET being an integer mode but
4698 MODE == BLKmode. In that case, get an aligned object whose size and
4699 alignment are the same as TARGET and store TARGET into it (we can avoid
4700 the store if the field being stored is the entire width of TARGET). Then
4701 call ourselves recursively to store the field into a BLKmode version of
4702 that object. Finally, load from the object into TARGET. This is not
4703 very efficient in general, but should only be slightly more expensive
4704 than the otherwise-required unaligned accesses. Perhaps this can be
4705 cleaned up later. */
4708 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4710 rtx object
= assign_stack_temp (GET_MODE (target
),
4711 GET_MODE_SIZE (GET_MODE (target
)), 0);
4712 rtx blk_object
= copy_rtx (object
);
4714 MEM_SET_IN_STRUCT_P (object
, 1);
4715 MEM_SET_IN_STRUCT_P (blk_object
, 1);
4716 PUT_MODE (blk_object
, BLKmode
);
4718 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4719 emit_move_insn (object
, target
);
4721 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4722 align
, total_size
, alias_set
);
4724 /* Even though we aren't returning target, we need to
4725 give it the updated value. */
4726 emit_move_insn (target
, object
);
4731 /* If the structure is in a register or if the component
4732 is a bit field, we cannot use addressing to access it.
4733 Use bit-field techniques or SUBREG to store in it. */
4735 if (mode
== VOIDmode
4736 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
4737 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
4738 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
4739 || GET_CODE (target
) == REG
4740 || GET_CODE (target
) == SUBREG
4741 /* If the field isn't aligned enough to store as an ordinary memref,
4742 store it as a bit field. */
4743 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS
4744 && (align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
)
4745 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
4746 || (mode
== BLKmode
&& SLOW_UNALIGNED_ACCESS
4747 && (TYPE_ALIGN (TREE_TYPE (exp
)) > align
* BITS_PER_UNIT
4748 || bitpos
% TYPE_ALIGN (TREE_TYPE (exp
)) != 0))
4749 /* If the RHS and field are a constant size and the size of the
4750 RHS isn't the same size as the bitfield, we must use bitfield
4753 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
)
4754 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp
))) != 0
4755 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp
))) != bitsize
)))
4757 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4759 /* If BITSIZE is narrower than the size of the type of EXP
4760 we will be narrowing TEMP. Normally, what's wanted are the
4761 low-order bits. However, if EXP's type is a record and this is
4762 big-endian machine, we want the upper BITSIZE bits. */
4763 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4764 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4765 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4766 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4767 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4771 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4773 if (mode
!= VOIDmode
&& mode
!= BLKmode
4774 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4775 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4777 /* If the modes of TARGET and TEMP are both BLKmode, both
4778 must be in memory and BITPOS must be aligned on a byte
4779 boundary. If so, we simply do a block copy. */
4780 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4782 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4783 || bitpos
% BITS_PER_UNIT
!= 0)
4786 target
= change_address (target
, VOIDmode
,
4787 plus_constant (XEXP (target
, 0),
4788 bitpos
/ BITS_PER_UNIT
));
4790 /* Find an alignment that is consistent with the bit position. */
4791 while ((bitpos
% (align
* BITS_PER_UNIT
)) != 0)
4794 emit_block_move (target
, temp
,
4795 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4799 return value_mode
== VOIDmode
? const0_rtx
: target
;
4802 /* Store the value in the bitfield. */
4803 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4804 if (value_mode
!= VOIDmode
)
4806 /* The caller wants an rtx for the value. */
4807 /* If possible, avoid refetching from the bitfield itself. */
4809 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4812 enum machine_mode tmode
;
4815 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
4816 tmode
= GET_MODE (temp
);
4817 if (tmode
== VOIDmode
)
4819 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4820 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4821 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4823 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4824 NULL_RTX
, value_mode
, 0, align
,
4831 rtx addr
= XEXP (target
, 0);
4834 /* If a value is wanted, it must be the lhs;
4835 so make the address stable for multiple use. */
4837 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
4838 && ! CONSTANT_ADDRESS_P (addr
)
4839 /* A frame-pointer reference is already stable. */
4840 && ! (GET_CODE (addr
) == PLUS
4841 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
4842 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
4843 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
4844 addr
= copy_to_reg (addr
);
4846 /* Now build a reference to just the desired component. */
4848 to_rtx
= copy_rtx (change_address (target
, mode
,
4849 plus_constant (addr
,
4851 / BITS_PER_UNIT
))));
4852 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
4853 MEM_ALIAS_SET (to_rtx
) = alias_set
;
4855 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
4859 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4860 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4861 ARRAY_REFs and find the ultimate containing object, which we return.
4863 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4864 bit position, and *PUNSIGNEDP to the signedness of the field.
4865 If the position of the field is variable, we store a tree
4866 giving the variable offset (in units) in *POFFSET.
4867 This offset is in addition to the bit position.
4868 If the position is not variable, we store 0 in *POFFSET.
4869 We set *PALIGNMENT to the alignment in bytes of the address that will be
4870 computed. This is the alignment of the thing we return if *POFFSET
4871 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4873 If any of the extraction expressions is volatile,
4874 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4876 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4877 is a mode that can be used to access the field. In that case, *PBITSIZE
4880 If the field describes a variable-sized object, *PMODE is set to
4881 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4882 this case, but the address of the object can be found. */
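/* Illustrative example (not from the original source): for a reference
   such as R.A[I].B, where B is a 16-bit field at a fixed byte offset in
   each element, get_inner_reference returns the innermost object R, sets
   *PBITSIZE to 16, folds the constant part of the position into *PBITPOS,
   and leaves the variable part (I times the element size, in units) in
   *POFFSET.  If B were a bit-field, *PMODE would come back as VOIDmode.  */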
4885 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
4886 punsignedp
, pvolatilep
, palignment
)
4891 enum machine_mode
*pmode
;
4896 tree orig_exp
= exp
;
4898 enum machine_mode mode
= VOIDmode
;
4899 tree offset
= integer_zero_node
;
4900 unsigned int alignment
= BIGGEST_ALIGNMENT
;
4902 if (TREE_CODE (exp
) == COMPONENT_REF
)
4904 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
4905 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
4906 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
4907 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
4909 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
4911 size_tree
= TREE_OPERAND (exp
, 1);
4912 *punsignedp
= TREE_UNSIGNED (exp
);
4916 mode
= TYPE_MODE (TREE_TYPE (exp
));
4917 if (mode
== BLKmode
)
4918 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
4920 *pbitsize
= GET_MODE_BITSIZE (mode
);
4921 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4926 if (TREE_CODE (size_tree
) != INTEGER_CST
)
4927 mode
= BLKmode
, *pbitsize
= -1;
4929 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
4932 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4933 and find the ultimate containing object. */
4939 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4941 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
4942 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
4943 : TREE_OPERAND (exp
, 2));
4944 tree constant
= integer_zero_node
, var
= pos
;
4946 /* If this field hasn't been filled in yet, don't go
4947 past it. This should only happen when folding expressions
4948 made during type construction. */
4952 /* Assume here that the offset is a multiple of a unit.
4953 If not, there should be an explicitly added constant. */
4954 if (TREE_CODE (pos
) == PLUS_EXPR
4955 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4956 constant
= TREE_OPERAND (pos
, 1), var
= TREE_OPERAND (pos
, 0);
4957 else if (TREE_CODE (pos
) == INTEGER_CST
)
4958 constant
= pos
, var
= integer_zero_node
;
4960 *pbitpos
+= TREE_INT_CST_LOW (constant
);
4961 offset
= size_binop (PLUS_EXPR
, offset
,
4962 size_binop (EXACT_DIV_EXPR
, var
,
4963 size_int (BITS_PER_UNIT
)));
4966 else if (TREE_CODE (exp
) == ARRAY_REF
)
4968 /* This code is based on the code in case ARRAY_REF in expand_expr
4969 below. We assume here that the size of an array element is
4970 always an integral multiple of BITS_PER_UNIT. */
4972 tree index
= TREE_OPERAND (exp
, 1);
4973 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4975 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4976 tree index_type
= TREE_TYPE (index
);
4979 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4981 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4983 index_type
= TREE_TYPE (index
);
4986 /* Optimize the special-case of a zero lower bound.
4988 We convert the low_bound to sizetype to avoid some problems
4989 with constant folding. (E.g. suppose the lower bound is 1,
4990 and its mode is QI. Without the conversion, (ARRAY
4991 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4992 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4994 But sizetype isn't quite right either (especially if
4995 the lowbound is negative). FIXME */
4997 if (! integer_zerop (low_bound
))
4998 index
= fold (build (MINUS_EXPR
, index_type
, index
,
4999 convert (sizetype
, low_bound
)));
5001 if (TREE_CODE (index
) == INTEGER_CST
)
5003 index
= convert (sbitsizetype
, index
);
5004 index_type
= TREE_TYPE (index
);
5007 xindex
= fold (build (MULT_EXPR
, sbitsizetype
, index
,
5008 convert (sbitsizetype
,
5009 TYPE_SIZE (TREE_TYPE (exp
)))));
5011 if (TREE_CODE (xindex
) == INTEGER_CST
5012 && TREE_INT_CST_HIGH (xindex
) == 0)
5013 *pbitpos
+= TREE_INT_CST_LOW (xindex
);
5016 /* Either the bit offset calculated above is not constant, or
5017 it overflowed. In either case, redo the multiplication
5018 against the size in units. This is especially important
5019 in the non-constant case to avoid a division at runtime. */
5020 xindex
= fold (build (MULT_EXPR
, ssizetype
, index
,
5022 TYPE_SIZE_UNIT (TREE_TYPE (exp
)))));
5024 if (contains_placeholder_p (xindex
))
5025 xindex
= build (WITH_RECORD_EXPR
, sizetype
, xindex
, exp
);
5027 offset
= size_binop (PLUS_EXPR
, offset
, xindex
);
5030 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5031 && ! ((TREE_CODE (exp
) == NOP_EXPR
5032 || TREE_CODE (exp
) == CONVERT_EXPR
)
5033 && (TYPE_MODE (TREE_TYPE (exp
))
5034 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5037 /* If any reference in the chain is volatile, the effect is volatile. */
5038 if (TREE_THIS_VOLATILE (exp
))
5041 /* If the offset is non-constant already, then we can't assume any
5042 alignment more than the alignment here. */
5043 if (! integer_zerop (offset
))
5044 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5046 exp
= TREE_OPERAND (exp
, 0);
5049 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
5050 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
5051 else if (TREE_TYPE (exp
) != 0)
5052 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5054 if (integer_zerop (offset
))
5057 if (offset
!= 0 && contains_placeholder_p (offset
))
5058 offset
= build (WITH_RECORD_EXPR
, sizetype
, offset
, orig_exp
);
5062 *palignment
= alignment
/ BITS_PER_UNIT
;
5066 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5067 static enum memory_use_mode
5068 get_memory_usage_from_modifier (modifier
)
5069 enum expand_modifier modifier
;
5075 return MEMORY_USE_RO
;
5077 case EXPAND_MEMORY_USE_WO
:
5078 return MEMORY_USE_WO
;
5080 case EXPAND_MEMORY_USE_RW
:
5081 return MEMORY_USE_RW
;
5083 case EXPAND_MEMORY_USE_DONT
:
5084 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5085 MEMORY_USE_DONT, because they are modifiers to a call of
5086 expand_expr in the ADDR_EXPR case of expand_expr. */
5087 case EXPAND_CONST_ADDRESS
:
5088 case EXPAND_INITIALIZER
:
5089 return MEMORY_USE_DONT
;
5090 case EXPAND_MEMORY_USE_BAD
:
5096 /* Given an rtx VALUE that may contain additions and multiplications,
5097 return an equivalent value that just refers to a register or memory.
5098 This is done by generating instructions to perform the arithmetic
5099 and returning a pseudo-register containing the value.
5101 The returned value may be a REG, SUBREG, MEM or constant. */
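/* Illustrative example (not from the original source): given VALUE =
   (plus:SI (reg:SI 100) (const_int 4)), force_operand emits the addition
   (through expand_binop with add_optab) and returns a pseudo register
   holding the sum; given VALUE = (mult:SI (reg:SI 100) (reg:SI 101)) it
   emits the multiplication through expand_mult instead.  */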
5104 force_operand (value
, target
)
5107 register optab binoptab
= 0;
5108 /* Use a temporary to force order of execution of calls to
5112 /* Use subtarget as the target for operand 0 of a binary operation. */
5113 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
5115 /* Check for a PIC address load. */
5117 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5118 && XEXP (value
, 0) == pic_offset_table_rtx
5119 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5120 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5121 || GET_CODE (XEXP (value
, 1)) == CONST
))
5124 subtarget
= gen_reg_rtx (GET_MODE (value
));
5125 emit_move_insn (subtarget
, value
);
5129 if (GET_CODE (value
) == PLUS
)
5130 binoptab
= add_optab
;
5131 else if (GET_CODE (value
) == MINUS
)
5132 binoptab
= sub_optab
;
5133 else if (GET_CODE (value
) == MULT
)
5135 op2
= XEXP (value
, 1);
5136 if (!CONSTANT_P (op2
)
5137 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5139 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5140 return expand_mult (GET_MODE (value
), tmp
,
5141 force_operand (op2
, NULL_RTX
),
5147 op2
= XEXP (value
, 1);
5148 if (!CONSTANT_P (op2
)
5149 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5151 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5153 binoptab
= add_optab
;
5154 op2
= negate_rtx (GET_MODE (value
), op2
);
5157 /* Check for an addition with OP2 a constant integer and our first
5158 operand a PLUS of a virtual register and something else. In that
5159 case, we want to emit the sum of the virtual register and the
5160 constant first and then add the other value. This allows virtual
5161 register instantiation to simply modify the constant rather than
5162 creating another one around this addition. */
5163 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5164 && GET_CODE (XEXP (value
, 0)) == PLUS
5165 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5166 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5167 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5169 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5170 XEXP (XEXP (value
, 0), 0), op2
,
5171 subtarget
, 0, OPTAB_LIB_WIDEN
);
5172 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5173 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5174 target
, 0, OPTAB_LIB_WIDEN
);
5177 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5178 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5179 force_operand (op2
, NULL_RTX
),
5180 target
, 0, OPTAB_LIB_WIDEN
);
5181 /* We give UNSIGNEDP = 0 to expand_binop
5182 because the only operations we are expanding here are signed ones. */
5187 /* Subroutine of expand_expr:
5188 save the non-copied parts (LIST) of an expr (LHS), and return a list
5189 which can restore these values to their previous values,
5190 should something modify their storage. */
5193 save_noncopied_parts (lhs
, list
)
5200 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5201 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5202 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5205 tree part
= TREE_VALUE (tail
);
5206 tree part_type
= TREE_TYPE (part
);
5207 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5208 rtx target
= assign_temp (part_type
, 0, 1, 1);
5209 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
5210 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
5211 parts
= tree_cons (to_be_saved
,
5212 build (RTL_EXPR
, part_type
, NULL_TREE
,
5215 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
5220 /* Subroutine of expand_expr:
5221 record the non-copied parts (LIST) of an expr (LHS), and return a list
5222 which specifies the initial values of these parts. */
5225 init_noncopied_parts (lhs
, list
)
5232 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5233 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5234 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5235 else if (TREE_PURPOSE (tail
))
5237 tree part
= TREE_VALUE (tail
);
5238 tree part_type
= TREE_TYPE (part
);
5239 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5240 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
5245 /* Subroutine of expand_expr: return nonzero iff there is no way that
5246 EXP can reference X, which is being modified. TOP_P is nonzero if this
5247 call is going to be used to determine whether we need a temporary
5248 for EXP, as opposed to a recursive call to this function.
5250 It is always safe for this routine to return zero since it merely
5251 searches for optimization opportunities. */
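/* Illustrative example (not from the original source): with X a MEM and
   EXP a CALL_EXPR, safe_from_p returns 0, since the call is assumed to
   clobber all hard registers and memory; with X a pseudo register and EXP
   a constant or an unrelated VAR_DECL, it returns 1 and the caller may
   reuse X as a target while expanding EXP.  */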
5254 safe_from_p (x
, exp
, top_p
)
5261 static int save_expr_count
;
5262 static int save_expr_size
= 0;
5263 static tree
*save_expr_rewritten
;
5264 static tree save_expr_trees
[256];
5267 /* If EXP has varying size, we MUST use a target since we currently
5268 have no way of allocating temporaries of variable size
5269 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5270 So we assume here that something at a higher level has prevented a
5271 clash. This is somewhat bogus, but the best we can do. Only
5272 do this when X is BLKmode and when we are at the top level. */
5273 || (top_p
&& TREE_TYPE (exp
) != 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
5274 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5275 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5276 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5277 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5279 && GET_MODE (x
) == BLKmode
))
5282 if (top_p
&& save_expr_size
== 0)
5286 save_expr_count
= 0;
5287 save_expr_size
= sizeof (save_expr_trees
) / sizeof (save_expr_trees
[0]);
5288 save_expr_rewritten
= &save_expr_trees
[0];
5290 rtn
= safe_from_p (x
, exp
, 1);
5292 for (i
= 0; i
< save_expr_count
; ++i
)
5294 if (TREE_CODE (save_expr_trees
[i
]) != ERROR_MARK
)
5296 TREE_SET_CODE (save_expr_trees
[i
], SAVE_EXPR
);
5304 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5305 find the underlying pseudo. */
5306 if (GET_CODE (x
) == SUBREG
)
5309 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5313 /* If X is a location in the outgoing argument area, it is always safe. */
5314 if (GET_CODE (x
) == MEM
5315 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5316 || (GET_CODE (XEXP (x
, 0)) == PLUS
5317 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
5320 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5323 exp_rtl
= DECL_RTL (exp
);
5330 if (TREE_CODE (exp
) == TREE_LIST
)
5331 return ((TREE_VALUE (exp
) == 0
5332 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5333 && (TREE_CHAIN (exp
) == 0
5334 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5335 else if (TREE_CODE (exp
) == ERROR_MARK
)
5336 return 1; /* An already-visited SAVE_EXPR? */
5341 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5345 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5346 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5350 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5351 the expression. If it is set, we conflict iff we are that rtx or
5352 both are in memory. Otherwise, we check all operands of the
5353 expression recursively. */
5355 switch (TREE_CODE (exp
))
5358 return (staticp (TREE_OPERAND (exp
, 0))
5359 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5360 || TREE_STATIC (exp
));
5363 if (GET_CODE (x
) == MEM
)
5368 exp_rtl
= CALL_EXPR_RTL (exp
);
5371 /* Assume that the call will clobber all hard registers and
5373 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5374 || GET_CODE (x
) == MEM
)
5381 /* If a sequence exists, we would have to scan every instruction
5382 in the sequence to see if it was safe. This is probably not
5384 if (RTL_EXPR_SEQUENCE (exp
))
5387 exp_rtl
= RTL_EXPR_RTL (exp
);
5390 case WITH_CLEANUP_EXPR
:
5391 exp_rtl
= RTL_EXPR_RTL (exp
);
5394 case CLEANUP_POINT_EXPR
:
5395 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5398 exp_rtl
= SAVE_EXPR_RTL (exp
);
5402 /* This SAVE_EXPR might appear many times in the top-level
5403 safe_from_p() expression, and if it has a complex
5404 subexpression, examining it multiple times could result
5405 in a combinatorial explosion. E.g. on an Alpha
5406 running at least 200MHz, a Fortran test case compiled with
5407 optimization took about 28 minutes to compile -- even though
5408 it was only a few lines long, and the complicated line causing
5409 so much time to be spent in the earlier version of safe_from_p()
5410 had only 293 or so unique nodes.
5412 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5413 where it is so we can turn it back in the top-level safe_from_p()
5416 /* For now, don't bother re-sizing the array. */
5417 if (save_expr_count
>= save_expr_size
)
5419 save_expr_rewritten
[save_expr_count
++] = exp
;
5421 nops
= tree_code_length
[(int) SAVE_EXPR
];
5422 for (i
= 0; i
< nops
; i
++)
5424 tree operand
= TREE_OPERAND (exp
, i
);
5425 if (operand
== NULL_TREE
)
5427 TREE_SET_CODE (exp
, ERROR_MARK
);
5428 if (!safe_from_p (x
, operand
, 0))
5430 TREE_SET_CODE (exp
, SAVE_EXPR
);
5432 TREE_SET_CODE (exp
, ERROR_MARK
);
5436 /* The only operand we look at is operand 1. The rest aren't
5437 part of the expression. */
5438 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5440 case METHOD_CALL_EXPR
:
5441 /* This takes a rtx argument, but shouldn't appear here. */
5448 /* If we have an rtx, we do not need to scan our operands. */
5452 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
5453 for (i
= 0; i
< nops
; i
++)
5454 if (TREE_OPERAND (exp
, i
) != 0
5455 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5459 /* If we have an rtl, find any enclosed object. Then see if we conflict
5463 if (GET_CODE (exp_rtl
) == SUBREG
)
5465 exp_rtl
= SUBREG_REG (exp_rtl
);
5466 if (GET_CODE (exp_rtl
) == REG
5467 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
5471 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5472 are memory and EXP is not readonly. */
5473 return ! (rtx_equal_p (x
, exp_rtl
)
5474 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5475 && ! TREE_READONLY (exp
)));
5478 /* If we reach here, it is safe. */
5482 /* Subroutine of expand_expr: return nonzero iff EXP is an
5483 expression whose type is statically determinable. */
5489 if (TREE_CODE (exp
) == PARM_DECL
5490 || TREE_CODE (exp
) == VAR_DECL
5491 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
5492 || TREE_CODE (exp
) == COMPONENT_REF
5493 || TREE_CODE (exp
) == ARRAY_REF
)
5498 /* Subroutine of expand_expr: return rtx if EXP is a
5499 variable or parameter; else return 0. */
5506 switch (TREE_CODE (exp
))
5510 return DECL_RTL (exp
);
5516 #ifdef MAX_INTEGER_COMPUTATION_MODE
5518 check_max_integer_computation_mode (exp
)
5521 enum tree_code code
;
5522 enum machine_mode mode
;
5524 /* Strip any NOPs that don't change the mode. */
5526 code
= TREE_CODE (exp
);
5528 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5529 if (code
== NOP_EXPR
5530 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5533 /* First check the type of the overall operation. We need only look at
5534 unary, binary and relational operations. */
5535 if (TREE_CODE_CLASS (code
) == '1'
5536 || TREE_CODE_CLASS (code
) == '2'
5537 || TREE_CODE_CLASS (code
) == '<')
5539 mode
= TYPE_MODE (TREE_TYPE (exp
));
5540 if (GET_MODE_CLASS (mode
) == MODE_INT
5541 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5542 fatal ("unsupported wide integer operation");
5545 /* Check operand of a unary op. */
5546 if (TREE_CODE_CLASS (code
) == '1')
5548 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5549 if (GET_MODE_CLASS (mode
) == MODE_INT
5550 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5551 fatal ("unsupported wide integer operation");
5554 /* Check operands of a binary/comparison op. */
5555 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5557 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5558 if (GET_MODE_CLASS (mode
) == MODE_INT
5559 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5560 fatal ("unsupported wide integer operation");
5562 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5563 if (GET_MODE_CLASS (mode
) == MODE_INT
5564 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5565 fatal ("unsupported wide integer operation");
5571 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5572 has any readonly fields. If any of the fields have types that
5573 contain readonly fields, return true as well. */
5576 readonly_fields_p (type
)
5581 for (field
= TYPE_FIELDS (type
); field
!= 0; field
= TREE_CHAIN (field
))
5582 if (TREE_CODE (field
) == FIELD_DECL
5583 && (TREE_READONLY (field
)
5584 || (TREE_CODE (TREE_TYPE (field
)) == RECORD_TYPE
5585 && readonly_fields_p (TREE_TYPE (field
)))))
5591 /* expand_expr: generate code for computing expression EXP.
5592 An rtx for the computed value is returned. The value is never null.
5593 In the case of a void EXP, const0_rtx is returned.
5595 The value may be stored in TARGET if TARGET is nonzero.
5596 TARGET is just a suggestion; callers must assume that
5597 the rtx returned may not be the same as TARGET.
5599 If TARGET is CONST0_RTX, it means that the value will be ignored.
5601 If TMODE is not VOIDmode, it suggests generating the
5602 result in mode TMODE. But this is done only when convenient.
5603 Otherwise, TMODE is ignored and the value generated in its natural mode.
5604 TMODE is just a suggestion; callers must assume that
5605 the rtx returned may not have mode TMODE.
5607 Note that TARGET may have neither TMODE nor MODE. In that case, it
5608 probably will not be used.
5610 If MODIFIER is EXPAND_SUM then when EXP is an addition
5611 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5612 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5613 products as above, or REG or MEM, or constant.
5614 Ordinarily in such cases we would output mul or add instructions
5615 and then return a pseudo reg containing the sum.
5617 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5618 it also marks a label as absolutely required (it can't be dead).
5619 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5620 This is used for outputting expressions used in initializers.
5622 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5623 with a constant address even if that address is not normally legitimate.
5624 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
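/* Illustrative example (not from the original source): with MODIFIER ==
   EXPAND_SUM, expanding the address arithmetic for something like A[I]
   may return a form such as

       (plus:SI (reg:SI 101) (mult:SI (reg:SI 102) (const_int 4)))

   rather than emitting the add and returning a single pseudo, so that the
   caller can fold the sum into an addressing mode.  */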
5627 expand_expr (exp
, target
, tmode
, modifier
)
5630 enum machine_mode tmode
;
5631 enum expand_modifier modifier
;
5633 register rtx op0
, op1
, temp
;
5634 tree type
= TREE_TYPE (exp
);
5635 int unsignedp
= TREE_UNSIGNED (type
);
5636 register enum machine_mode mode
;
5637 register enum tree_code code
= TREE_CODE (exp
);
5639 rtx subtarget
, original_target
;
5642 /* Used by check-memory-usage to make modifier read only. */
5643 enum expand_modifier ro_modifier
;
5645 /* Handle ERROR_MARK before anybody tries to access its type. */
5646 if (TREE_CODE (exp
) == ERROR_MARK
)
5648 op0
= CONST0_RTX (tmode
);
5654 mode
= TYPE_MODE (type
);
5655 /* Use subtarget as the target for operand 0 of a binary operation. */
5656 subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
5657 original_target
= target
;
5658 ignore
= (target
== const0_rtx
5659 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
5660 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
5661 || code
== COND_EXPR
)
5662 && TREE_CODE (type
) == VOID_TYPE
));
5664 /* Make a read-only version of the modifier. */
5665 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
5666 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
5667 ro_modifier
= modifier
;
5669 ro_modifier
= EXPAND_NORMAL
;
5671 /* Don't use hard regs as subtargets, because the combiner
5672 can only handle pseudo regs. */
5673 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
5675 /* Avoid subtargets inside loops,
5676 since they hide some invariant expressions. */
5677 if (preserve_subexpressions_p ())
5680 /* If we are going to ignore this result, we need only do something
5681 if there is a side-effect somewhere in the expression. If there
5682 is, short-circuit the most common cases here. Note that we must
5683 not call expand_expr with anything but const0_rtx in case this
5684 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5688 if (! TREE_SIDE_EFFECTS (exp
))
5691 /* Ensure we reference a volatile object even if value is ignored, but
5692 don't do this if all we are doing is taking its address. */
5693 if (TREE_THIS_VOLATILE (exp
)
5694 && TREE_CODE (exp
) != FUNCTION_DECL
5695 && mode
!= VOIDmode
&& mode
!= BLKmode
5696 && modifier
!= EXPAND_CONST_ADDRESS
)
5698 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
5699 if (GET_CODE (temp
) == MEM
)
5700 temp
= copy_to_reg (temp
);
5704 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
5705 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
5706 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5707 VOIDmode
, ro_modifier
);
5708 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
5709 || code
== ARRAY_REF
)
5711 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5712 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5715 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
5716 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
5717 /* If the second operand has no side effects, just evaluate
5719 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
5720 VOIDmode
, ro_modifier
);
5721 else if (code
== BIT_FIELD_REF
)
5723 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
5724 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
5725 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, ro_modifier
);
5732 #ifdef MAX_INTEGER_COMPUTATION_MODE
5733 /* Only check stuff here if the mode we want is different from the mode
5734 of the expression; if it's the same, check_max_integer_computation_mode
5735 will handle it. Do we really need to check this stuff at all? */
5738 && GET_MODE (target
) != mode
5739 && TREE_CODE (exp
) != INTEGER_CST
5740 && TREE_CODE (exp
) != PARM_DECL
5741 && TREE_CODE (exp
) != ARRAY_REF
5742 && TREE_CODE (exp
) != COMPONENT_REF
5743 && TREE_CODE (exp
) != BIT_FIELD_REF
5744 && TREE_CODE (exp
) != INDIRECT_REF
5745 && TREE_CODE (exp
) != CALL_EXPR
5746 && TREE_CODE (exp
) != VAR_DECL
5747 && TREE_CODE (exp
) != RTL_EXPR
)
5749 enum machine_mode mode
= GET_MODE (target
);
5751 if (GET_MODE_CLASS (mode
) == MODE_INT
5752 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5753 fatal ("unsupported wide integer operation");
5757 && TREE_CODE (exp
) != INTEGER_CST
5758 && TREE_CODE (exp
) != PARM_DECL
5759 && TREE_CODE (exp
) != ARRAY_REF
5760 && TREE_CODE (exp
) != COMPONENT_REF
5761 && TREE_CODE (exp
) != BIT_FIELD_REF
5762 && TREE_CODE (exp
) != INDIRECT_REF
5763 && TREE_CODE (exp
) != VAR_DECL
5764 && TREE_CODE (exp
) != CALL_EXPR
5765 && TREE_CODE (exp
) != RTL_EXPR
5766 && GET_MODE_CLASS (tmode
) == MODE_INT
5767 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
5768 fatal ("unsupported wide integer operation");
5770 check_max_integer_computation_mode (exp
);
5773 /* If will do cse, generate all results into pseudo registers
5774 since 1) that allows cse to find more things
5775 and 2) otherwise cse could produce an insn the machine
5778 if (! cse_not_expected
&& mode
!= BLKmode
&& target
5779 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5786 tree function
= decl_function_context (exp
);
5787 /* Handle using a label in a containing function. */
5788 if (function
!= current_function_decl
5789 && function
!= inline_function_decl
&& function
!= 0)
5791 struct function
*p
= find_function_data (function
);
5792 /* Allocate in the memory associated with the function
5793 that the label is in. */
5794 push_obstacks (p
->function_obstack
,
5795 p
->function_maybepermanent_obstack
);
5797 p
->expr
->x_forced_labels
5798 = gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (exp
),
5799 p
->expr
->x_forced_labels
);
5804 if (modifier
== EXPAND_INITIALIZER
)
5805 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
,
5810 temp
= gen_rtx_MEM (FUNCTION_MODE
,
5811 gen_rtx_LABEL_REF (Pmode
, label_rtx (exp
)));
5812 if (function
!= current_function_decl
5813 && function
!= inline_function_decl
&& function
!= 0)
5814 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
5819 if (DECL_RTL (exp
) == 0)
5821 error_with_decl (exp
, "prior parameter's size depends on `%s'");
5822 return CONST0_RTX (mode
);
5825 /* ... fall through ... */
5828 /* If a static var's type was incomplete when the decl was written,
5829 but the type is complete now, lay out the decl now. */
5830 if (DECL_SIZE (exp
) == 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
5831 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
5833 push_obstacks_nochange ();
5834 end_temporary_allocation ();
5835 layout_decl (exp
, 0);
5836 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
5840 /* Although static-storage variables start off initialized, according to
5841 ANSI C, a memcpy could overwrite them with uninitialized values. So
5842 we check them too. This also lets us check for read-only variables
5843 accessed via a non-const declaration, in case it won't be detected
5844 any other way (e.g., in an embedded system or OS kernel without
5847 Aggregates are not checked here; they're handled elsewhere. */
5848 if (current_function
&& current_function_check_memory_usage
5850 && GET_CODE (DECL_RTL (exp
)) == MEM
5851 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
5853 enum memory_use_mode memory_usage
;
5854 memory_usage
= get_memory_usage_from_modifier (modifier
);
5856 if (memory_usage
!= MEMORY_USE_DONT
)
5857 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
5858 XEXP (DECL_RTL (exp
), 0), Pmode
,
5859 GEN_INT (int_size_in_bytes (type
)),
5860 TYPE_MODE (sizetype
),
5861 GEN_INT (memory_usage
),
5862 TYPE_MODE (integer_type_node
));
5865 /* ... fall through ... */
5869 if (DECL_RTL (exp
) == 0)
5872 /* Ensure variable marked as used even if it doesn't go through
5873 a parser. If it hasn't be used yet, write out an external
5875 if (! TREE_USED (exp
))
5877 assemble_external (exp
);
5878 TREE_USED (exp
) = 1;
5881 /* Show we haven't gotten RTL for this yet. */
5884 /* Handle variables inherited from containing functions. */
5885 context
= decl_function_context (exp
);
5887 /* We treat inline_function_decl as an alias for the current function
5888 because that is the inline function whose vars, types, etc.
5889 are being merged into the current function.
5890 See expand_inline_function. */
5892 if (context
!= 0 && context
!= current_function_decl
5893 && context
!= inline_function_decl
5894 /* If var is static, we don't need a static chain to access it. */
5895 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
5896 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
5900 /* Mark as non-local and addressable. */
5901 DECL_NONLOCAL (exp
) = 1;
5902 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
5904 mark_addressable (exp
);
5905 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
5907 addr
= XEXP (DECL_RTL (exp
), 0);
5908 if (GET_CODE (addr
) == MEM
)
5909 addr
= gen_rtx_MEM (Pmode
,
5910 fix_lexical_addr (XEXP (addr
, 0), exp
));
5912 addr
= fix_lexical_addr (addr
, exp
);
5913 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
5916 /* This is the case of an array whose size is to be determined
5917 from its initializer, while the initializer is still being parsed.
5920 else if (GET_CODE (DECL_RTL (exp
)) == MEM
5921 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
5922 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
5923 XEXP (DECL_RTL (exp
), 0));
5925 /* If DECL_RTL is memory, we are in the normal case and either
5926 the address is not valid or it is not a register and -fforce-addr
5927 is specified, get the address into a register. */
5929 else if (GET_CODE (DECL_RTL (exp
)) == MEM
5930 && modifier
!= EXPAND_CONST_ADDRESS
5931 && modifier
!= EXPAND_SUM
5932 && modifier
!= EXPAND_INITIALIZER
5933 && (! memory_address_p (DECL_MODE (exp
),
5934 XEXP (DECL_RTL (exp
), 0))
5936 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
5937 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
5938 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
5940 /* If we got something, return it. But first, set the alignment
5941 if the address is a register. */
5944 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
5945 mark_reg_pointer (XEXP (temp
, 0),
5946 DECL_ALIGN (exp
) / BITS_PER_UNIT
);
5951 /* If the mode of DECL_RTL does not match that of the decl, it
5952 must be a promoted value. We return a SUBREG of the wanted mode,
5953 but mark it so that we know that it was already extended. */
5955 if (GET_CODE (DECL_RTL (exp
)) == REG
5956 && GET_MODE (DECL_RTL (exp
)) != mode
)
5958 /* Get the signedness used for this variable. Ensure we get the
5959 same mode we got when the variable was declared. */
5960 if (GET_MODE (DECL_RTL (exp
))
5961 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
5964 temp
= gen_rtx_SUBREG (mode
, DECL_RTL (exp
), 0);
5965 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5966 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5970 return DECL_RTL (exp
);
5973 return immed_double_const (TREE_INT_CST_LOW (exp
),
5974 TREE_INT_CST_HIGH (exp
),
5978 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
,
5979 EXPAND_MEMORY_USE_BAD
);
5982 /* If optimized, generate immediate CONST_DOUBLE
5983 which will be turned into memory by reload if necessary.
5985 We used to force a register so that loop.c could see it. But
5986 this does not allow gen_* patterns to perform optimizations with
5987 the constants. It also produces two insns in cases like "x = 1.0;".
5988 On most machines, floating-point constants are not permitted in
5989 many insns, so we'd end up copying it to a register in any case.
5991 Now, we do the copying in expand_binop, if appropriate. */
5992 return immed_real_const (exp
);
5996 if (! TREE_CST_RTL (exp
))
5997 output_constant_def (exp
);
5999 /* TREE_CST_RTL probably contains a constant address.
6000 On RISC machines where a constant address isn't valid,
6001 make some insns to get that address into a register. */
6002 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
6003 && modifier
!= EXPAND_CONST_ADDRESS
6004 && modifier
!= EXPAND_INITIALIZER
6005 && modifier
!= EXPAND_SUM
6006 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
6008 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
6009 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
6010 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
6011 return TREE_CST_RTL (exp
);
6013 case EXPR_WITH_FILE_LOCATION
:
6016 char *saved_input_filename
= input_filename
;
6017 int saved_lineno
= lineno
;
6018 input_filename
= EXPR_WFL_FILENAME (exp
);
6019 lineno
= EXPR_WFL_LINENO (exp
);
6020 if (EXPR_WFL_EMIT_LINE_NOTE (exp
))
6021 emit_line_note (input_filename
, lineno
);
6022 /* Possibly avoid switching back and force here */
6023 to_return
= expand_expr (EXPR_WFL_NODE (exp
), target
, tmode
, modifier
);
6024 input_filename
= saved_input_filename
;
6025 lineno
= saved_lineno
;
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}

      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
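
      /* Illustrative note (not from the original source): a SAVE_EXPR is the
	 tree-level form of "evaluate once, reuse many times".  For an
	 operand such as (x + y) that a front end wrapped in a SAVE_EXPR,
	 the first expansion above stores the value into SAVE_EXPR_RTL;
	 every later expansion of the same node simply hands back that
	 cached rtx, so the side effects and the work are not repeated.
	 The expression (x + y) is only a made-up example.  */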
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
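
      /* Illustrative note (not from the original source): PLACEHOLDER_EXPR
	 and WITH_RECORD_EXPR cooperate to expand self-referential sizes,
	 as in an Ada-style record whose array field's bound is another
	 field of the same record.  WITH_RECORD_EXPR pushes the record
	 object on placeholder_list; when the nested size expression reaches
	 its PLACEHOLDER_EXPR, the search in the case above substitutes that
	 object (or an INDIRECT_REF of a pointer to it).  The Ada scenario
	 is only one example of a front end that uses this mechanism.  */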
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();
      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (!MOVE_BY_PIECES_P
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
	  return target;
	}
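
      /* Illustrative note (not from the original source): for an aggregate
	 initializer such as "struct s v = { 0, 0, 0, 7 };", mostly_zeros_p
	 lets store_constructor clear the whole object and then store only
	 the nonzero field, instead of copying from a static template; a
	 large, fully constant, addressable initializer goes the other way
	 and is emitted through output_constant_def.  The particular
	 initializer shown is just a made-up example.  */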
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (current_function && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, Pmode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_MEMORY_USE_WO
	    && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, ro_modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && TREE_INT_CST_HIGH (index) == 0
			 && (TREE_INT_CST_LOW (index)
			     < TREE_STRING_LENGTH (init)))
		  return (GEN_INT
			  (TREE_STRING_POINTER
			   (init)[TREE_INT_CST_LOW (index)]));
	      }
	  }
      }

      /* ... fall through ... */
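
      /* Illustrative note (not from the original source): the string folding
	 above turns a constant subscript of a literal, e.g. "foo"[2], into
	 the character constant 'o' at expansion time, so no memory reference
	 is generated.  The expression "foo"[2] is only a made-up example of
	 the kind of tree this path handles.  */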
6514 /* If the operand is a CONSTRUCTOR, we can just extract the
6515 appropriate field if it is present. Don't do this if we have
6516 already written the data since we want to refer to that copy
6517 and varasm.c assumes that's what we'll do. */
6518 if (code
!= ARRAY_REF
6519 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
6520 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
6524 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
6525 elt
= TREE_CHAIN (elt
))
6526 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
6527 /* We can normally use the value of the field in the
6528 CONSTRUCTOR. However, if this is a bitfield in
6529 an integral mode that we can fit in a HOST_WIDE_INT,
6530 we must mask only the number of bits in the bitfield,
6531 since this is done implicitly by the constructor. If
6532 the bitfield does not meet either of those conditions,
6533 we can't do this optimization. */
6534 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
6535 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
6537 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
6538 <= HOST_BITS_PER_WIDE_INT
))))
6540 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
6541 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
6543 int bitsize
= DECL_FIELD_SIZE (TREE_PURPOSE (elt
));
6545 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
6547 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
6548 op0
= expand_and (op0
, op1
, target
);
6552 enum machine_mode imode
6553 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
6555 = build_int_2 (GET_MODE_BITSIZE (imode
) - bitsize
,
6558 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
6560 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
6570 enum machine_mode mode1
;
6576 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6577 &mode1
, &unsignedp
, &volatilep
,
6580 /* If we got back the original object, something is wrong. Perhaps
6581 we are evaluating an expression too early. In any event, don't
6582 infinitely recurse. */
6586 /* If TEM's type is a union of variable size, pass TARGET to the inner
6587 computation, since it will need a temporary and TARGET is known
6588 to have to do. This occurs in unchecked conversion in Ada. */
6590 op0
= expand_expr (tem
,
6591 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
6592 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
6594 ? target
: NULL_RTX
),
6596 (modifier
== EXPAND_INITIALIZER
6597 || modifier
== EXPAND_CONST_ADDRESS
)
6598 ? modifier
: EXPAND_NORMAL
);
6600 /* If this is a constant, put it into a register if it is a
6601 legitimate constant and OFFSET is 0 and memory if it isn't. */
6602 if (CONSTANT_P (op0
))
6604 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
6605 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
6607 op0
= force_reg (mode
, op0
);
6609 op0
= validize_mem (force_const_mem (mode
, op0
));
6614 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
6616 /* If this object is in memory, put it into a register.
6617 This case can't occur in C, but can in Ada if we have
6618 unchecked conversion of an expression from a scalar type to
6619 an array or record type. */
6620 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6621 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
6623 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
6625 mark_temp_addr_taken (memloc
);
6626 emit_move_insn (memloc
, op0
);
6630 if (GET_CODE (op0
) != MEM
)
6633 if (GET_MODE (offset_rtx
) != ptr_mode
)
6635 #ifdef POINTERS_EXTEND_UNSIGNED
6636 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
6638 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
6642 /* A constant address in OP0 can have VOIDmode, we must not try
6643 to call force_reg for that case. Avoid that case. */
6644 if (GET_CODE (op0
) == MEM
6645 && GET_MODE (op0
) == BLKmode
6646 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
6648 && (bitpos
% bitsize
) == 0
6649 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
6650 && (alignment
* BITS_PER_UNIT
) == GET_MODE_ALIGNMENT (mode1
))
6652 rtx temp
= change_address (op0
, mode1
,
6653 plus_constant (XEXP (op0
, 0),
6656 if (GET_CODE (XEXP (temp
, 0)) == REG
)
6659 op0
= change_address (op0
, mode1
,
6660 force_reg (GET_MODE (XEXP (temp
, 0)),
6666 op0
= change_address (op0
, VOIDmode
,
6667 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
6668 force_reg (ptr_mode
,
6672 /* Don't forget about volatility even if this is a bitfield. */
6673 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
6675 op0
= copy_rtx (op0
);
6676 MEM_VOLATILE_P (op0
) = 1;
6679 /* Check the access. */
6680 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
6682 enum memory_use_mode memory_usage
;
6683 memory_usage
= get_memory_usage_from_modifier (modifier
);
6685 if (memory_usage
!= MEMORY_USE_DONT
)
6690 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
6691 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
6693 /* Check the access right of the pointer. */
6694 if (size
> BITS_PER_UNIT
)
6695 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
6697 GEN_INT (size
/ BITS_PER_UNIT
),
6698 TYPE_MODE (sizetype
),
6699 GEN_INT (memory_usage
),
6700 TYPE_MODE (integer_type_node
));
6704 /* In cases where an aligned union has an unaligned object
6705 as a field, we might be extracting a BLKmode value from
6706 an integer-mode (e.g., SImode) object. Handle this case
6707 by doing the extract into an object as wide as the field
6708 (which we know to be the width of a basic mode), then
6709 storing into memory, and changing the mode to BLKmode.
6710 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6711 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6712 if (mode1
== VOIDmode
6713 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6714 || (modifier
!= EXPAND_CONST_ADDRESS
6715 && modifier
!= EXPAND_INITIALIZER
6716 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
6717 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6718 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6719 /* If the field isn't aligned enough to fetch as a memref,
6720 fetch it as a bit field. */
6721 || (mode1
!= BLKmode
&& SLOW_UNALIGNED_ACCESS
6722 && ((TYPE_ALIGN (TREE_TYPE (tem
))
6723 < (unsigned int) GET_MODE_ALIGNMENT (mode
))
6724 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)))))
6725 || (modifier
!= EXPAND_CONST_ADDRESS
6726 && modifier
!= EXPAND_INITIALIZER
6728 && SLOW_UNALIGNED_ACCESS
6729 && (TYPE_ALIGN (type
) > alignment
* BITS_PER_UNIT
6730 || bitpos
% TYPE_ALIGN (type
) != 0)))
6732 enum machine_mode ext_mode
= mode
;
6734 if (ext_mode
== BLKmode
6735 && ! (target
!= 0 && GET_CODE (op0
) == MEM
6736 && GET_CODE (target
) == MEM
6737 && bitpos
% BITS_PER_UNIT
== 0))
6738 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
6740 if (ext_mode
== BLKmode
)
6742 /* In this case, BITPOS must start at a byte boundary and
6743 TARGET, if specified, must be a MEM. */
6744 if (GET_CODE (op0
) != MEM
6745 || (target
!= 0 && GET_CODE (target
) != MEM
)
6746 || bitpos
% BITS_PER_UNIT
!= 0)
6749 op0
= change_address (op0
, VOIDmode
,
6750 plus_constant (XEXP (op0
, 0),
6751 bitpos
/ BITS_PER_UNIT
));
6753 target
= assign_temp (type
, 0, 1, 1);
6755 emit_block_move (target
, op0
,
6756 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6763 op0
= validize_mem (op0
);
6765 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
6766 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6768 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
6769 unsignedp
, target
, ext_mode
, ext_mode
,
6771 int_size_in_bytes (TREE_TYPE (tem
)));
6773 /* If the result is a record type and BITSIZE is narrower than
6774 the mode of OP0, an integral mode, and this is a big endian
6775 machine, we must put the field into the high-order bits. */
6776 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
6777 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6778 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
6779 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
6780 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
6784 if (mode
== BLKmode
)
6786 rtx
new = assign_stack_temp (ext_mode
,
6787 bitsize
/ BITS_PER_UNIT
, 0);
6789 emit_move_insn (new, op0
);
6790 op0
= copy_rtx (new);
6791 PUT_MODE (op0
, BLKmode
);
6792 MEM_SET_IN_STRUCT_P (op0
, 1);
6798 /* If the result is BLKmode, use that to access the object
6800 if (mode
== BLKmode
)
6803 /* Get a reference to just this component. */
6804 if (modifier
== EXPAND_CONST_ADDRESS
6805 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6806 op0
= gen_rtx_MEM (mode1
, plus_constant (XEXP (op0
, 0),
6807 (bitpos
/ BITS_PER_UNIT
)));
6809 op0
= change_address (op0
, mode1
,
6810 plus_constant (XEXP (op0
, 0),
6811 (bitpos
/ BITS_PER_UNIT
)));
6813 if (GET_CODE (op0
) == MEM
)
6814 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
6816 if (GET_CODE (XEXP (op0
, 0)) == REG
)
6817 mark_reg_pointer (XEXP (op0
, 0), alignment
);
6819 MEM_SET_IN_STRUCT_P (op0
, 1);
6820 MEM_VOLATILE_P (op0
) |= volatilep
;
6821 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
6822 || modifier
== EXPAND_CONST_ADDRESS
6823 || modifier
== EXPAND_INITIALIZER
)
6825 else if (target
== 0)
6826 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6828 convert_move (target
, op0
, unsignedp
);
6832 /* Intended for a reference to a buffer of a file-object in Pascal.
6833 But it's not certain that a special tree code will really be
6834 necessary for these. INDIRECT_REF might work for them. */
6840 /* Pascal set IN expression.
6843 rlo = set_low - (set_low%bits_per_word);
6844 the_word = set [ (index - rlo)/bits_per_word ];
6845 bit_index = index % bits_per_word;
6846 bitmask = 1 << bit_index;
6847 return !!(the_word & bitmask); */
6849 tree set
= TREE_OPERAND (exp
, 0);
6850 tree index
= TREE_OPERAND (exp
, 1);
6851 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
6852 tree set_type
= TREE_TYPE (set
);
6853 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
6854 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
6855 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
6856 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
6857 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
6858 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
6859 rtx setaddr
= XEXP (setval
, 0);
6860 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
6862 rtx diff
, quo
, rem
, addr
, bit
, result
;
6864 preexpand_calls (exp
);
6866 /* If domain is empty, answer is no. Likewise if index is constant
6867 and out of bounds. */
6868 if (((TREE_CODE (set_high_bound
) == INTEGER_CST
6869 && TREE_CODE (set_low_bound
) == INTEGER_CST
6870 && tree_int_cst_lt (set_high_bound
, set_low_bound
))
6871 || (TREE_CODE (index
) == INTEGER_CST
6872 && TREE_CODE (set_low_bound
) == INTEGER_CST
6873 && tree_int_cst_lt (index
, set_low_bound
))
6874 || (TREE_CODE (set_high_bound
) == INTEGER_CST
6875 && TREE_CODE (index
) == INTEGER_CST
6876 && tree_int_cst_lt (set_high_bound
, index
))))
6880 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6882 /* If we get here, we have to generate the code for both cases
6883 (in range and out of range). */
6885 op0
= gen_label_rtx ();
6886 op1
= gen_label_rtx ();
6888 if (! (GET_CODE (index_val
) == CONST_INT
6889 && GET_CODE (lo_r
) == CONST_INT
))
6891 emit_cmp_and_jump_insns (index_val
, lo_r
, LT
, NULL_RTX
,
6892 GET_MODE (index_val
), iunsignedp
, 0, op1
);
6895 if (! (GET_CODE (index_val
) == CONST_INT
6896 && GET_CODE (hi_r
) == CONST_INT
))
6898 emit_cmp_and_jump_insns (index_val
, hi_r
, GT
, NULL_RTX
,
6899 GET_MODE (index_val
), iunsignedp
, 0, op1
);
6902 /* Calculate the element number of bit zero in the first word
6904 if (GET_CODE (lo_r
) == CONST_INT
)
6905 rlow
= GEN_INT (INTVAL (lo_r
)
6906 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
6908 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
6909 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
6910 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
6912 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
6913 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
6915 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
6916 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
6917 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
6918 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
6920 addr
= memory_address (byte_mode
,
6921 expand_binop (index_mode
, add_optab
, diff
,
6922 setaddr
, NULL_RTX
, iunsignedp
,
6925 /* Extract the bit we want to examine */
6926 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
6927 gen_rtx_MEM (byte_mode
, addr
),
6928 make_tree (TREE_TYPE (index
), rem
),
6930 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
6931 GET_MODE (target
) == byte_mode
? target
: 0,
6932 1, OPTAB_LIB_WIDEN
);
6934 if (result
!= target
)
6935 convert_move (target
, result
, 1);
6937 /* Output the code to handle the out-of-range case. */
6940 emit_move_insn (target
, const0_rtx
);
6945 case WITH_CLEANUP_EXPR
:
6946 if (RTL_EXPR_RTL (exp
) == 0)
6949 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
6950 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 2));
6952 /* That's it for this cleanup. */
6953 TREE_OPERAND (exp
, 2) = 0;
6955 return RTL_EXPR_RTL (exp
);
6957 case CLEANUP_POINT_EXPR
:
6959 /* Start a new binding layer that will keep track of all cleanup
6960 actions to be performed. */
6961 expand_start_bindings (2);
6963 target_temp_slot_level
= temp_slot_level
;
6965 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, ro_modifier
);
6966 /* If we're going to use this value, load it up now. */
6968 op0
= force_not_mem (op0
);
6969 preserve_temp_slots (op0
);
6970 expand_end_bindings (NULL_TREE
, 0, 0);
6975 /* Check for a built-in function. */
6976 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6977 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6979 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6980 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
6982 /* If this call was expanded already by preexpand_calls,
6983 just return the result we got. */
6984 if (CALL_EXPR_RTL (exp
) != 0)
6985 return CALL_EXPR_RTL (exp
);
6987 return expand_call (exp
, target
, ignore
);
6989 case NON_LVALUE_EXPR
:
6992 case REFERENCE_EXPR
:
6993 if (TREE_CODE (type
) == UNION_TYPE
)
6995 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
6997 /* If both input and output are BLKmode, this conversion
6998 isn't actually doing anything unless we need to make the
6999 alignment stricter. */
7000 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
7001 && (TYPE_ALIGN (type
) <= TYPE_ALIGN (valtype
)
7002 || TYPE_ALIGN (type
) >= BIGGEST_ALIGNMENT
))
7003 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
,
7008 if (mode
!= BLKmode
)
7009 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7011 target
= assign_temp (type
, 0, 1, 1);
7014 if (GET_CODE (target
) == MEM
)
7015 /* Store data into beginning of memory target. */
7016 store_expr (TREE_OPERAND (exp
, 0),
7017 change_address (target
, TYPE_MODE (valtype
), 0), 0);
7019 else if (GET_CODE (target
) == REG
)
7020 /* Store this field into a union of the proper type. */
7021 store_field (target
,
7022 MIN ((int_size_in_bytes (TREE_TYPE
7023 (TREE_OPERAND (exp
, 0)))
7025 GET_MODE_BITSIZE (mode
)),
7026 0, TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
7027 VOIDmode
, 0, 1, int_size_in_bytes (type
), 0);
7031 /* Return the entire union. */
7035 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7037 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
7040 /* If the signedness of the conversion differs and OP0 is
7041 a promoted SUBREG, clear that indication since we now
7042 have to do the proper extension. */
7043 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
7044 && GET_CODE (op0
) == SUBREG
)
7045 SUBREG_PROMOTED_VAR_P (op0
) = 0;
7050 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
7051 if (GET_MODE (op0
) == mode
)
7054 /* If OP0 is a constant, just convert it into the proper mode. */
7055 if (CONSTANT_P (op0
))
7057 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7058 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7060 if (modifier
== EXPAND_INITIALIZER
)
7061 return gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
7065 convert_to_mode (mode
, op0
,
7066 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7068 convert_move (target
, op0
,
7069 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7073 /* We come here from MINUS_EXPR when the second operand is a
7076 this_optab
= add_optab
;
7078 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7079 something else, make sure we add the register to the constant and
7080 then to the other thing. This case can occur during strength
7081 reduction and doing it this way will produce better code if the
7082 frame pointer or argument pointer is eliminated.
7084 fold-const.c will ensure that the constant is always in the inner
7085 PLUS_EXPR, so the only case we need to do anything about is if
7086 sp, ap, or fp is our second argument, in which case we must swap
7087 the innermost first argument and our second argument. */
7089 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
7090 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
7091 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
7092 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
7093 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
7094 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
7096 tree t
= TREE_OPERAND (exp
, 1);
7098 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7099 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
7102 /* If the result is to be ptr_mode and we are adding an integer to
7103 something, we might be forming a constant. So try to use
7104 plus_constant. If it produces a sum and we can't accept it,
7105 use force_operand. This allows P = &ARR[const] to generate
7106 efficient code on machines where a SYMBOL_REF is not a valid
7109 If this is an EXPAND_SUM call, always return the sum. */
7110 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
7111 || mode
== ptr_mode
)
7113 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
7114 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
7115 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
7119 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
7121 /* Use immed_double_const to ensure that the constant is
7122 truncated according to the mode of OP1, then sign extended
7123 to a HOST_WIDE_INT. Using the constant directly can result
7124 in non-canonical RTL in a 64x32 cross compile. */
7126 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)),
7128 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))));
7129 op1
= plus_constant (op1
, INTVAL (constant_part
));
7130 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7131 op1
= force_operand (op1
, target
);
7135 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7136 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
7137 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
7141 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7143 if (! CONSTANT_P (op0
))
7145 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7146 VOIDmode
, modifier
);
7147 /* Don't go to both_summands if modifier
7148 says it's not right to return a PLUS. */
7149 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7153 /* Use immed_double_const to ensure that the constant is
7154 truncated according to the mode of OP1, then sign extended
7155 to a HOST_WIDE_INT. Using the constant directly can result
7156 in non-canonical RTL in a 64x32 cross compile. */
7158 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)),
7160 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7161 op0
= plus_constant (op0
, INTVAL (constant_part
));
7162 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7163 op0
= force_operand (op0
, target
);
7168 /* No sense saving up arithmetic to be done
7169 if it's all in the wrong mode to form part of an address.
7170 And force_operand won't know whether to sign-extend or
7172 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
7173 || mode
!= ptr_mode
)
7176 preexpand_calls (exp
);
7177 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7180 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, ro_modifier
);
7181 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, ro_modifier
);
7184 /* Make sure any term that's a sum with a constant comes last. */
7185 if (GET_CODE (op0
) == PLUS
7186 && CONSTANT_P (XEXP (op0
, 1)))
7192 /* If adding to a sum including a constant,
7193 associate it to put the constant outside. */
7194 if (GET_CODE (op1
) == PLUS
7195 && CONSTANT_P (XEXP (op1
, 1)))
7197 rtx constant_term
= const0_rtx
;
7199 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7202 /* Ensure that MULT comes first if there is one. */
7203 else if (GET_CODE (op0
) == MULT
)
7204 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7206 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7208 /* Let's also eliminate constants from op0 if possible. */
7209 op0
= eliminate_constant_term (op0
, &constant_term
);
7211 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7212 their sum should be a constant. Form it into OP1, since the
7213 result we want will then be OP0 + OP1. */
7215 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7220 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7223 /* Put a constant term last and put a multiplication first. */
7224 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7225 temp
= op1
, op1
= op0
, op0
= temp
;
7227 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7228 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
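
      /* Illustrative note (not from the original source): the reassociation
	 above keeps sums in the canonical form "multiplication first,
	 constant last", so that an address computation like p + i*4 + 8
	 tends to come out as
	 (plus (plus (mult (reg i) (const_int 4)) (reg p)) (const_int 8)),
	 a shape many machines accept directly as an indexed address.  The
	 expression p + i*4 + 8 is just a made-up example.  */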
7231 /* For initializers, we are allowed to return a MINUS of two
7232 symbolic constants. Here we handle all cases when both operands
7234 /* Handle difference of two symbolic constants,
7235 for the sake of an initializer. */
7236 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7237 && really_constant_p (TREE_OPERAND (exp
, 0))
7238 && really_constant_p (TREE_OPERAND (exp
, 1)))
7240 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
7241 VOIDmode
, ro_modifier
);
7242 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7243 VOIDmode
, ro_modifier
);
7245 /* If the last operand is a CONST_INT, use plus_constant of
7246 the negated constant. Else make the MINUS. */
7247 if (GET_CODE (op1
) == CONST_INT
)
7248 return plus_constant (op0
, - INTVAL (op1
));
7250 return gen_rtx_MINUS (mode
, op0
, op1
);
7252 /* Convert A - const to A + (-const). */
7253 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7255 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
7256 TREE_OPERAND (exp
, 1)));
7258 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
7259 /* If we can't negate the constant in TYPE, leave it alone and
7260 expand_binop will negate it for us. We used to try to do it
7261 here in the signed version of TYPE, but that doesn't work
7262 on POINTER_TYPEs. */;
7265 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
7269 this_optab
= sub_optab
;
7273 preexpand_calls (exp
);
7274 /* If first operand is constant, swap them.
7275 Thus the following special case checks need only
7276 check the second operand. */
7277 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
7279 register tree t1
= TREE_OPERAND (exp
, 0);
7280 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
7281 TREE_OPERAND (exp
, 1) = t1
;
7284 /* Attempt to return something suitable for generating an
7285 indexed address, for machines that support that. */
7287 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
7288 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7289 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
7291 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
7294 /* Apply distributive law if OP0 is x+c. */
7295 if (GET_CODE (op0
) == PLUS
7296 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7301 (mode
, XEXP (op0
, 0),
7302 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
7303 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
7304 * INTVAL (XEXP (op0
, 1))));
7306 if (GET_CODE (op0
) != REG
)
7307 op0
= force_operand (op0
, NULL_RTX
);
7308 if (GET_CODE (op0
) != REG
)
7309 op0
= copy_to_mode_reg (mode
, op0
);
7312 gen_rtx_MULT (mode
, op0
,
7313 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
7316 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7319 /* Check for multiplying things that have been extended
7320 from a narrower type. If this machine supports multiplying
7321 in that narrower type with a result in the desired type,
7322 do it that way, and avoid the explicit type-conversion. */
7323 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7324 && TREE_CODE (type
) == INTEGER_TYPE
7325 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7326 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7327 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7328 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7329 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7330 /* Don't use a widening multiply if a shift will do. */
7331 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7332 > HOST_BITS_PER_WIDE_INT
)
7333 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7335 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7336 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7338 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
7339 /* If both operands are extended, they must either both
7340 be zero-extended or both be sign-extended. */
7341 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7343 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
7345 enum machine_mode innermode
7346 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
7347 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7348 ? smul_widen_optab
: umul_widen_optab
);
7349 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7350 ? umul_widen_optab
: smul_widen_optab
);
7351 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7353 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7355 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7356 NULL_RTX
, VOIDmode
, 0);
7357 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7358 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7361 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7362 NULL_RTX
, VOIDmode
, 0);
7365 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7366 && innermode
== word_mode
)
7369 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7370 NULL_RTX
, VOIDmode
, 0);
7371 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7372 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
7375 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7376 NULL_RTX
, VOIDmode
, 0);
7377 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7378 unsignedp
, OPTAB_LIB_WIDEN
);
7379 htem
= expand_mult_highpart_adjust (innermode
,
7380 gen_highpart (innermode
, temp
),
7382 gen_highpart (innermode
, temp
),
7384 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
7389 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7390 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7391 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
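
      /* Illustrative note (not from the original source): the checks above
	 let a product whose operands were both widened from a narrower
	 type, say two HImode values extended to SImode, be done with a
	 widening-multiply pattern (a mulhisi3-style insn on machines that
	 provide one) instead of extending both operands and doing a
	 full-width multiply.  The modes and pattern name here are only
	 illustrative.  */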
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7473 target
= original_target
;
7474 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1), 1)
7475 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
7476 || GET_MODE (target
) != mode
7477 || (GET_CODE (target
) == REG
7478 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
7479 target
= gen_reg_rtx (mode
);
7480 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7481 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7483 /* First try to do it with a special MIN or MAX instruction.
7484 If that does not win, use a conditional jump to select the proper
7486 this_optab
= (TREE_UNSIGNED (type
)
7487 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
7488 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
7490 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
7495 /* At this point, a MEM target is no longer useful; we will get better
7498 if (GET_CODE (target
) == MEM
)
7499 target
= gen_reg_rtx (mode
);
7502 emit_move_insn (target
, op0
);
7504 op0
= gen_label_rtx ();
7506 /* If this mode is an integer too wide to compare properly,
7507 compare word by word. Rely on cse to optimize constant cases. */
7508 if (GET_MODE_CLASS (mode
) == MODE_INT
&& ! can_compare_p (mode
, ccp_jump
))
7510 if (code
== MAX_EXPR
)
7511 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7512 target
, op1
, NULL_RTX
, op0
);
7514 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
7515 op1
, target
, NULL_RTX
, op0
);
7519 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7520 do_compare_rtx_and_jump (target
, op1
, code
== MAX_EXPR
? GE
: LE
,
7521 unsignedp
, mode
, NULL_RTX
, 0, NULL_RTX
,
7524 emit_move_insn (target
, op1
);
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
7542 /* ??? Can optimize bitwise operations with one arg constant.
7543 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7544 and (a bitwise1 b) bitwise2 b (etc)
7545 but that is probably not worth while. */
7547 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7548 boolean values when we want in all cases to compute both of them. In
7549 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7550 as actual zero-or-1 values and then bitwise anding. In cases where
7551 there cannot be any side effects, better code would be made by
7552 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7553 how to recognize those cases. */
7555 case TRUTH_AND_EXPR
:
7557 this_optab
= and_optab
;
7562 this_optab
= ior_optab
;
7565 case TRUTH_XOR_EXPR
:
7567 this_optab
= xor_optab
;
7574 preexpand_calls (exp
);
7575 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
7577 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7578 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
7581 /* Could determine the answer when only additive constants differ. Also,
7582 the addition of one can be handled by changing the condition. */
7589 preexpand_calls (exp
);
7590 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
7594 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7595 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
7597 && GET_CODE (original_target
) == REG
7598 && (GET_MODE (original_target
)
7599 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7601 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
7604 if (temp
!= original_target
)
7605 temp
= copy_to_reg (temp
);
7607 op1
= gen_label_rtx ();
7608 emit_cmp_and_jump_insns (temp
, const0_rtx
, EQ
, NULL_RTX
,
7609 GET_MODE (temp
), unsignedp
, 0, op1
);
7610 emit_move_insn (temp
, const1_rtx
);
7615 /* If no set-flag instruction, must generate a conditional
7616 store into a temporary variable. Drop through
7617 and handle this like && and ||. */
7619 case TRUTH_ANDIF_EXPR
:
7620 case TRUTH_ORIF_EXPR
:
7622 && (target
== 0 || ! safe_from_p (target
, exp
, 1)
7623 /* Make sure we don't have a hard reg (such as function's return
7624 value) live across basic blocks, if not optimizing. */
7625 || (!optimize
&& GET_CODE (target
) == REG
7626 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
7627 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
7630 emit_clr_insn (target
);
7632 op1
= gen_label_rtx ();
7633 jumpifnot (exp
, op1
);
7636 emit_0_to_1_insn (target
);
7639 return ignore
? const0_rtx
: target
;
7641 case TRUTH_NOT_EXPR
:
7642 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
7643 /* The parser is careful to generate TRUTH_NOT_EXPR
7644 only with operands that are always zero or one. */
7645 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
7646 target
, 1, OPTAB_LIB_WIDEN
);
7652 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
7654 return expand_expr (TREE_OPERAND (exp
, 1),
7655 (ignore
? const0_rtx
: target
),
7659 /* If we would have a "singleton" (see below) were it not for a
7660 conversion in each arm, bring that conversion back out. */
7661 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7662 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
7663 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
7664 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
7666 tree
true = TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
7667 tree
false = TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
7669 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7670 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7671 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7672 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7673 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7674 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7675 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7676 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7677 return expand_expr (build1 (NOP_EXPR
, type
,
7678 build (COND_EXPR
, TREE_TYPE (true),
7679 TREE_OPERAND (exp
, 0),
7681 target
, tmode
, modifier
);
7685 /* Note that COND_EXPRs whose type is a structure or union
7686 are required to be constructed to contain assignments of
7687 a temporary variable, so that we can evaluate them here
7688 for side effect only. If type is void, we must do likewise. */
7690 /* If an arm of the branch requires a cleanup,
7691 only that cleanup is performed. */
7694 tree binary_op
= 0, unary_op
= 0;
7696 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7697 convert it to our mode, if necessary. */
7698 if (integer_onep (TREE_OPERAND (exp
, 1))
7699 && integer_zerop (TREE_OPERAND (exp
, 2))
7700 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7704 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
7709 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
7710 if (GET_MODE (op0
) == mode
)
7714 target
= gen_reg_rtx (mode
);
7715 convert_move (target
, op0
, unsignedp
);
7719 /* Check for X ? A + B : A. If we have this, we can copy A to the
7720 output and conditionally add B. Similarly for unary operations.
7721 Don't do this if X has side-effects because those side effects
7722 might affect A or B and the "?" operation is a sequence point in
7723 ANSI. (operand_equal_p tests for side effects.) */
7725 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
7726 && operand_equal_p (TREE_OPERAND (exp
, 2),
7727 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7728 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
7729 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
7730 && operand_equal_p (TREE_OPERAND (exp
, 1),
7731 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7732 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
7733 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
7734 && operand_equal_p (TREE_OPERAND (exp
, 2),
7735 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
7736 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
7737 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
7738 && operand_equal_p (TREE_OPERAND (exp
, 1),
7739 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
7740 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
7742 /* If we are not to produce a result, we have no target. Otherwise,
7743 if a target was specified use it; it will not be used as an
7744 intermediate target unless it is safe. If no target, use a
7749 else if (original_target
7750 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
7751 || (singleton
&& GET_CODE (original_target
) == REG
7752 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
7753 && original_target
== var_rtx (singleton
)))
7754 && GET_MODE (original_target
) == mode
7755 #ifdef HAVE_conditional_move
7756 && (! can_conditionally_move_p (mode
)
7757 || GET_CODE (original_target
) == REG
7758 || TREE_ADDRESSABLE (type
))
7760 && ! (GET_CODE (original_target
) == MEM
7761 && MEM_VOLATILE_P (original_target
)))
7762 temp
= original_target
;
7763 else if (TREE_ADDRESSABLE (type
))
7766 temp
= assign_temp (type
, 0, 0, 1);
7768 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7769 do the test of X as a store-flag operation, do this as
7770 A + ((X != 0) << log C). Similarly for other simple binary
7771 operators. Only do for C == 1 if BRANCH_COST is low. */
7772 if (temp
&& singleton
&& binary_op
7773 && (TREE_CODE (binary_op
) == PLUS_EXPR
7774 || TREE_CODE (binary_op
) == MINUS_EXPR
7775 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
7776 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
7777 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
7778 : integer_onep (TREE_OPERAND (binary_op
, 1)))
7779 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
7782 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
7783 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
7784 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
7787 /* If we had X ? A : A + 1, do this as A + (X == 0).
7789 We have to invert the truth value here and then put it
7790 back later if do_store_flag fails. We cannot simply copy
7791 TREE_OPERAND (exp, 0) to another variable and modify that
7792 because invert_truthvalue can modify the tree pointed to
7794 if (singleton
== TREE_OPERAND (exp
, 1))
7795 TREE_OPERAND (exp
, 0)
7796 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7798 result
= do_store_flag (TREE_OPERAND (exp
, 0),
7799 (safe_from_p (temp
, singleton
, 1)
7801 mode
, BRANCH_COST
<= 1);
7803 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
7804 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
7805 build_int_2 (tree_log2
7809 (safe_from_p (temp
, singleton
, 1)
7810 ? temp
: NULL_RTX
), 0);
7814 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
7815 return expand_binop (mode
, boptab
, op1
, result
, temp
,
7816 unsignedp
, OPTAB_LIB_WIDEN
);
7818 else if (singleton
== TREE_OPERAND (exp
, 1))
7819 TREE_OPERAND (exp
, 0)
7820 = invert_truthvalue (TREE_OPERAND (exp
, 0));
7823 do_pending_stack_adjust ();
7825 op0
= gen_label_rtx ();
7827 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
7831 /* If the target conflicts with the other operand of the
7832 binary op, we can't use it. Also, we can't use the target
7833 if it is a hard register, because evaluating the condition
7834 might clobber it. */
7836 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
7837 || (GET_CODE (temp
) == REG
7838 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
7839 temp
= gen_reg_rtx (mode
);
7840 store_expr (singleton
, temp
, 0);
7843 expand_expr (singleton
,
7844 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7845 if (singleton
== TREE_OPERAND (exp
, 1))
7846 jumpif (TREE_OPERAND (exp
, 0), op0
);
7848 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7850 start_cleanup_deferral ();
7851 if (binary_op
&& temp
== 0)
7852 /* Just touch the other operand. */
7853 expand_expr (TREE_OPERAND (binary_op
, 1),
7854 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7856 store_expr (build (TREE_CODE (binary_op
), type
,
7857 make_tree (type
, temp
),
7858 TREE_OPERAND (binary_op
, 1)),
7861 store_expr (build1 (TREE_CODE (unary_op
), type
,
7862 make_tree (type
, temp
)),
7866 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7867 comparison operator. If we have one of these cases, set the
7868 output to A, branch on A (cse will merge these two references),
7869 then set the output to FOO. */
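	  /* Illustrative example (not from the original sources): for
	     "x != 0 ? x : y" the code below emits roughly

		 temp = x;  if (temp != 0) goto done;  temp = y;  done:

	     so the branch tests the copy already stored in TEMP and cse
	     can merge the two references to X.  */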
7871 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
7872 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
7873 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7874 TREE_OPERAND (exp
, 1), 0)
7875 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
7876 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
7877 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
7879 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
7880 temp
= gen_reg_rtx (mode
);
7881 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7882 jumpif (TREE_OPERAND (exp
, 0), op0
);
7884 start_cleanup_deferral ();
7885 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7889 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
7890 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
7891 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7892 TREE_OPERAND (exp
, 2), 0)
7893 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
7894 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
7895 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
7897 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
7898 temp
= gen_reg_rtx (mode
);
7899 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7900 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7902 start_cleanup_deferral ();
7903 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7908 op1
= gen_label_rtx ();
7909 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
7911 start_cleanup_deferral ();
	  /* One branch of the cond can be void, if it never returns.  For
	     example A ? throw : E.  */
7916 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
7917 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
7919 expand_expr (TREE_OPERAND (exp
, 1),
7920 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7921 end_cleanup_deferral ();
7923 emit_jump_insn (gen_jump (op1
));
7926 start_cleanup_deferral ();
7928 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
7929 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
7931 expand_expr (TREE_OPERAND (exp
, 2),
7932 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
7935 end_cleanup_deferral ();
7946 /* Something needs to be initialized, but we didn't know
7947 where that thing was when building the tree. For example,
7948 it could be the return value of a function, or a parameter
7949 to a function which lays down in the stack, or a temporary
7950 variable which must be passed by reference.
7952 We guarantee that the expression will either be constructed
7953 or copied into our original target. */
7955 tree slot
= TREE_OPERAND (exp
, 0);
7956 tree cleanups
= NULL_TREE
;
7959 if (TREE_CODE (slot
) != VAR_DECL
)
7963 target
= original_target
;
7965 /* Set this here so that if we get a target that refers to a
7966 register variable that's already been used, put_reg_into_stack
7967 knows that it should fix up those uses. */
7968 TREE_USED (slot
) = 1;
7972 if (DECL_RTL (slot
) != 0)
7974 target
= DECL_RTL (slot
);
		/* If we have already expanded the slot, don't do it
		   again.  */
7977 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
7982 target
= assign_temp (type
, 2, 0, 1);
7983 /* All temp slots at this level must not conflict. */
7984 preserve_temp_slots (target
);
7985 DECL_RTL (slot
) = target
;
7986 if (TREE_ADDRESSABLE (slot
))
7988 TREE_ADDRESSABLE (slot
) = 0;
7989 mark_addressable (slot
);
	      /* Since SLOT is not known to the called function
		 to belong to its stack frame, we must build an explicit
		 cleanup.  This case occurs when we must build up a reference
		 to pass the reference as an argument.  In this case,
		 it is very likely that such a reference need not be
		 built here.  */
7999 if (TREE_OPERAND (exp
, 2) == 0)
8000 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8001 cleanups
= TREE_OPERAND (exp
, 2);
8006 /* This case does occur, when expanding a parameter which
8007 needs to be constructed on the stack. The target
8008 is the actual stack address that we want to initialize.
8009 The function we call will perform the cleanup in this case. */
8011 /* If we have already assigned it space, use that space,
8012 not target that we were passed in, as our target
8013 parameter is only a hint. */
8014 if (DECL_RTL (slot
) != 0)
8016 target
= DECL_RTL (slot
);
	    /* If we have already expanded the slot, don't do it
	       again.  */
8019 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8024 DECL_RTL (slot
) = target
;
8025 /* If we must have an addressable slot, then make sure that
8026 the RTL that we just stored in slot is OK. */
8027 if (TREE_ADDRESSABLE (slot
))
8029 TREE_ADDRESSABLE (slot
) = 0;
8030 mark_addressable (slot
);
8035 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8036 /* Mark it as expanded. */
8037 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8039 store_expr (exp1
, target
, 0);
8041 expand_decl_cleanup (NULL_TREE
, cleanups
);
8048 tree lhs
= TREE_OPERAND (exp
, 0);
8049 tree rhs
= TREE_OPERAND (exp
, 1);
8050 tree noncopied_parts
= 0;
8051 tree lhs_type
= TREE_TYPE (lhs
);
8053 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8054 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
8055 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
8056 TYPE_NONCOPIED_PARTS (lhs_type
));
8057 while (noncopied_parts
!= 0)
8059 expand_assignment (TREE_VALUE (noncopied_parts
),
8060 TREE_PURPOSE (noncopied_parts
), 0, 0);
8061 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8068 /* If lhs is complex, expand calls in rhs before computing it.
8069 That's so we don't compute a pointer and save it over a call.
8070 If lhs is simple, compute it first so we can give it as a
8071 target if the rhs is just a call. This avoids an extra temp and copy
8072 and that prevents a partial-subsumption which makes bad code.
8073 Actually we could treat component_ref's of vars like vars. */
8075 tree lhs
= TREE_OPERAND (exp
, 0);
8076 tree rhs
= TREE_OPERAND (exp
, 1);
8077 tree noncopied_parts
= 0;
8078 tree lhs_type
= TREE_TYPE (lhs
);
8082 if (TREE_CODE (lhs
) != VAR_DECL
8083 && TREE_CODE (lhs
) != RESULT_DECL
8084 && TREE_CODE (lhs
) != PARM_DECL
8085 && ! (TREE_CODE (lhs
) == INDIRECT_REF
8086 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs
, 0)))))
8087 preexpand_calls (exp
);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
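	/* Illustrative example (hypothetical, not from the original
	   sources): given

	       struct s { unsigned a : 1, b : 1; } x;
	       x.a |= x.b;

	   and a dead result, this emits a test of x.b followed by a
	   conditional store of 1 into x.a, instead of loading both
	   bitfields and computing the IOR.  */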
8098 && TREE_CODE (lhs
) == COMPONENT_REF
8099 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8100 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8101 && TREE_OPERAND (rhs
, 0) == lhs
8102 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8103 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
8104 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
8106 rtx label
= gen_label_rtx ();
8108 do_jump (TREE_OPERAND (rhs
, 1),
8109 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8110 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8111 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8112 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8114 : integer_zero_node
)),
8116 do_pending_stack_adjust ();
8121 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
8122 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
8123 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
8124 TYPE_NONCOPIED_PARTS (lhs_type
));
8126 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8127 while (noncopied_parts
!= 0)
8129 expand_assignment (TREE_PURPOSE (noncopied_parts
),
8130 TREE_VALUE (noncopied_parts
), 0, 0);
8131 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8137 if (!TREE_OPERAND (exp
, 0))
8138 expand_null_return ();
8140 expand_return (TREE_OPERAND (exp
, 0));
8143 case PREINCREMENT_EXPR
:
8144 case PREDECREMENT_EXPR
:
8145 return expand_increment (exp
, 0, ignore
);
8147 case POSTINCREMENT_EXPR
:
8148 case POSTDECREMENT_EXPR
:
8149 /* Faster to treat as pre-increment if result is not used. */
8150 return expand_increment (exp
, ! ignore
, ignore
);
8153 /* If nonzero, TEMP will be set to the address of something that might
8154 be a MEM corresponding to a stack slot. */
8157 /* Are we taking the address of a nested function? */
8158 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8159 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8160 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8161 && ! TREE_STATIC (exp
))
8163 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8164 op0
= force_operand (op0
, target
);
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
8168 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8172 /* We make sure to pass const0_rtx down if we came in with
8173 ignore set, to avoid doing the cleanups twice for something. */
8174 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8175 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8176 (modifier
== EXPAND_INITIALIZER
8177 ? modifier
: EXPAND_CONST_ADDRESS
));
8179 /* If we are going to ignore the result, OP0 will have been set
8180 to const0_rtx, so just return it. Don't get confused and
8181 think we are taking the address of the constant. */
8185 op0
= protect_from_queue (op0
, 0);
8187 /* We would like the object in memory. If it is a constant, we can
8188 have it be statically allocated into memory. For a non-constant,
8189 we need to allocate some memory and store the value into it. */
8191 if (CONSTANT_P (op0
))
8192 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8194 else if (GET_CODE (op0
) == MEM
)
8196 mark_temp_addr_taken (op0
);
8197 temp
= XEXP (op0
, 0);
8200 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8201 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8203 /* If this object is in a register, it must be not
8205 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8206 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
8208 mark_temp_addr_taken (memloc
);
8209 emit_move_insn (memloc
, op0
);
8213 if (GET_CODE (op0
) != MEM
)
8216 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8218 temp
= XEXP (op0
, 0);
8219 #ifdef POINTERS_EXTEND_UNSIGNED
8220 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
8221 && mode
== ptr_mode
)
8222 temp
= convert_memory_address (ptr_mode
, temp
);
8227 op0
= force_operand (XEXP (op0
, 0), target
);
8230 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8231 op0
= force_reg (Pmode
, op0
);
8233 if (GET_CODE (op0
) == REG
8234 && ! REG_USERVAR_P (op0
))
8235 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
8240 update_temp_slot_address (temp
, op0
);
8242 #ifdef POINTERS_EXTEND_UNSIGNED
8243 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8244 && mode
== ptr_mode
)
8245 op0
= convert_memory_address (ptr_mode
, op0
);
8250 case ENTRY_VALUE_EXPR
:
8253 /* COMPLEX type for Extended Pascal & Fortran */
8256 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8259 /* Get the rtx code of the operands. */
8260 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8261 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
8264 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8268 /* Move the real (op0) and imaginary (op1) parts to their location. */
8269 emit_move_insn (gen_realpart (mode
, target
), op0
);
8270 emit_move_insn (gen_imagpart (mode
, target
), op1
);
8272 insns
= get_insns ();
8275 /* Complex construction should appear as a single unit. */
8276 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8277 each with a separate pseudo as destination.
8278 It's not correct for flow to treat them as a unit. */
8279 if (GET_CODE (target
) != CONCAT
)
8280 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
8288 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8289 return gen_realpart (mode
, op0
);
8292 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8293 return gen_imagpart (mode
, op0
);
8297 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
8301 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8304 target
= gen_reg_rtx (mode
);
8308 /* Store the realpart and the negated imagpart to target. */
8309 emit_move_insn (gen_realpart (partmode
, target
),
8310 gen_realpart (partmode
, op0
));
8312 imag_t
= gen_imagpart (partmode
, target
);
8313 temp
= expand_unop (partmode
, neg_optab
,
8314 gen_imagpart (partmode
, op0
), imag_t
, 0);
8316 emit_move_insn (imag_t
, temp
);
8318 insns
= get_insns ();
8321 /* Conjugate should appear as a single unit
8322 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8323 each with a separate pseudo as destination.
8324 It's not correct for flow to treat them as a unit. */
8325 if (GET_CODE (target
) != CONCAT
)
8326 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
8333 case TRY_CATCH_EXPR
:
8335 tree handler
= TREE_OPERAND (exp
, 1);
8337 expand_eh_region_start ();
8339 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
8341 expand_eh_region_end (handler
);
8346 case TRY_FINALLY_EXPR
:
8348 tree try_block
= TREE_OPERAND (exp
, 0);
8349 tree finally_block
= TREE_OPERAND (exp
, 1);
8350 rtx finally_label
= gen_label_rtx ();
8351 rtx done_label
= gen_label_rtx ();
8352 rtx return_link
= gen_reg_rtx (Pmode
);
8353 tree cleanup
= build (GOTO_SUBROUTINE_EXPR
, void_type_node
,
8354 (tree
) finally_label
, (tree
) return_link
);
8355 TREE_SIDE_EFFECTS (cleanup
) = 1;
8357 /* Start a new binding layer that will keep track of all cleanup
8358 actions to be performed. */
8359 expand_start_bindings (2);
8361 target_temp_slot_level
= temp_slot_level
;
8363 expand_decl_cleanup (NULL_TREE
, cleanup
);
8364 op0
= expand_expr (try_block
, target
, tmode
, modifier
);
8366 preserve_temp_slots (op0
);
8367 expand_end_bindings (NULL_TREE
, 0, 0);
8368 emit_jump (done_label
);
8369 emit_label (finally_label
);
8370 expand_expr (finally_block
, const0_rtx
, VOIDmode
, 0);
8371 emit_indirect_jump (return_link
);
8372 emit_label (done_label
);
8376 case GOTO_SUBROUTINE_EXPR
:
8378 rtx subr
= (rtx
) TREE_OPERAND (exp
, 0);
8379 rtx return_link
= *(rtx
*) &TREE_OPERAND (exp
, 1);
8380 rtx return_address
= gen_label_rtx ();
8381 emit_move_insn (return_link
, gen_rtx_LABEL_REF (Pmode
, return_address
));
8383 emit_label (return_address
);
8389 rtx dcc
= get_dynamic_cleanup_chain ();
8390 emit_move_insn (dcc
, validize_mem (gen_rtx_MEM (Pmode
, dcc
)));
8396 rtx dhc
= get_dynamic_handler_chain ();
8397 emit_move_insn (dhc
, validize_mem (gen_rtx_MEM (Pmode
, dhc
)));
8402 return expand_builtin_va_arg (TREE_OPERAND (exp
, 0), type
);
8405 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
8408 /* Here to do an ordinary binary operator, generating an instruction
8409 from the optab already placed in `this_optab'. */
8411 preexpand_calls (exp
);
8412 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1), 1))
8414 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
8415 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8417 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
8418 unsignedp
, OPTAB_LIB_WIDEN
);
8424 /* Similar to expand_expr, except that we don't specify a target, target
8425 mode, or modifier and we return the alignment of the inner type. This is
8426 used in cases where it is not necessary to align the result to the
8427 alignment of its type as long as we know the alignment of the result, for
8428 example for comparisons of BLKmode values. */
8431 expand_expr_unaligned (exp
, palign
)
8436 tree type
= TREE_TYPE (exp
);
8437 register enum machine_mode mode
= TYPE_MODE (type
);
8439 /* Default the alignment we return to that of the type. */
8440 *palign
= TYPE_ALIGN (type
);
  /* The only case in which we do anything special is if the resulting mode
     is BLKmode.  */
  if (mode != BLKmode)
    return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8447 switch (TREE_CODE (exp
))
8451 case NON_LVALUE_EXPR
:
8452 /* Conversions between BLKmode values don't change the underlying
8453 alignment or value. */
8454 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == BLKmode
)
8455 return expand_expr_unaligned (TREE_OPERAND (exp
, 0), palign
);
      /* Much of the code for this case is copied directly from expand_expr.
	 We need to duplicate it here because we will do something different
	 in the fall-through case, so we need to handle the same exceptions
	 it does.  */
8464 tree array
= TREE_OPERAND (exp
, 0);
8465 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
8466 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
8467 tree index
= TREE_OPERAND (exp
, 1);
8468 tree index_type
= TREE_TYPE (index
);
8471 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
8474 /* Optimize the special-case of a zero lower bound.
8476 We convert the low_bound to sizetype to avoid some problems
8477 with constant folding. (E.g. suppose the lower bound is 1,
8478 and its mode is QI. Without the conversion, (ARRAY
8479 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8480 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8482 But sizetype isn't quite right either (especially if
8483 the lowbound is negative). FIXME */
8485 if (! integer_zerop (low_bound
))
8486 index
= fold (build (MINUS_EXPR
, index_type
, index
,
8487 convert (sizetype
, low_bound
)));
8489 /* If this is a constant index into a constant array,
8490 just get the value from the array. Handle both the cases when
8491 we have an explicit constructor and when our operand is a variable
8492 that was declared const. */
8494 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
8496 if (TREE_CODE (index
) == INTEGER_CST
8497 && TREE_INT_CST_HIGH (index
) == 0)
8499 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
8501 i
= TREE_INT_CST_LOW (index
);
8503 elem
= TREE_CHAIN (elem
);
8505 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
8510 else if (optimize
>= 1
8511 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8512 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8513 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
8515 if (TREE_CODE (index
) == INTEGER_CST
)
8517 tree init
= DECL_INITIAL (array
);
8519 i
= TREE_INT_CST_LOW (index
);
8520 if (TREE_CODE (init
) == CONSTRUCTOR
)
8522 tree elem
= CONSTRUCTOR_ELTS (init
);
8525 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
))
8526 elem
= TREE_CHAIN (elem
);
8528 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
8535 /* ... fall through ... */
8539 /* If the operand is a CONSTRUCTOR, we can just extract the
8540 appropriate field if it is present. Don't do this if we have
8541 already written the data since we want to refer to that copy
8542 and varasm.c assumes that's what we'll do. */
8543 if (TREE_CODE (exp
) != ARRAY_REF
8544 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8545 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
8549 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
8550 elt
= TREE_CHAIN (elt
))
8551 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
8552 /* Note that unlike the case in expand_expr, we know this is
8553 BLKmode and hence not an integer. */
8554 return expand_expr_unaligned (TREE_VALUE (elt
), palign
);
8558 enum machine_mode mode1
;
8565 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8566 &mode1
, &unsignedp
, &volatilep
,
8569 /* If we got back the original object, something is wrong. Perhaps
8570 we are evaluating an expression too early. In any event, don't
8571 infinitely recurse. */
8575 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8577 /* If this is a constant, put it into a register if it is a
8578 legitimate constant and OFFSET is 0 and memory if it isn't. */
8579 if (CONSTANT_P (op0
))
8581 enum machine_mode inner_mode
= TYPE_MODE (TREE_TYPE (tem
));
8583 if (inner_mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
8585 op0
= force_reg (inner_mode
, op0
);
8587 op0
= validize_mem (force_const_mem (inner_mode
, op0
));
8592 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
8594 /* If this object is in a register, put it into memory.
8595 This case can't occur in C, but can in Ada if we have
8596 unchecked conversion of an expression from a scalar type to
8597 an array or record type. */
8598 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8599 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8601 rtx memloc
= assign_temp (TREE_TYPE (tem
), 1, 1, 1);
8603 mark_temp_addr_taken (memloc
);
8604 emit_move_insn (memloc
, op0
);
8608 if (GET_CODE (op0
) != MEM
)
8611 if (GET_MODE (offset_rtx
) != ptr_mode
)
8613 #ifdef POINTERS_EXTEND_UNSIGNED
8614 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
8616 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
8620 op0
= change_address (op0
, VOIDmode
,
8621 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
8622 force_reg (ptr_mode
,
8626 /* Don't forget about volatility even if this is a bitfield. */
8627 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
8629 op0
= copy_rtx (op0
);
8630 MEM_VOLATILE_P (op0
) = 1;
8633 /* Check the access. */
8634 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
8639 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
8640 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
8642 /* Check the access right of the pointer. */
8643 if (size
> BITS_PER_UNIT
)
8644 emit_library_call (chkr_check_addr_libfunc
, 1, VOIDmode
, 3,
8645 to
, ptr_mode
, GEN_INT (size
/ BITS_PER_UNIT
),
8646 TYPE_MODE (sizetype
),
8647 GEN_INT (MEMORY_USE_RO
),
8648 TYPE_MODE (integer_type_node
));
8651 /* In cases where an aligned union has an unaligned object
8652 as a field, we might be extracting a BLKmode value from
8653 an integer-mode (e.g., SImode) object. Handle this case
8654 by doing the extract into an object as wide as the field
8655 (which we know to be the width of a basic mode), then
8656 storing into memory, and changing the mode to BLKmode.
8657 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8658 EXPAND_INITIALIZER), then we must not copy to a temporary. */
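	  /* Illustrative example (not from the original sources): a BLKmode
	     field packed inside an SImode union member is pulled out with
	     extract_bit_field into an SImode temporary, spilled to a stack
	     slot, and that MEM is then given BLKmode so the caller sees an
	     ordinary BLKmode reference.  */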
8659 if (mode1
== VOIDmode
8660 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8661 || (SLOW_UNALIGNED_ACCESS
8662 && (TYPE_ALIGN (type
) > alignment
* BITS_PER_UNIT
8663 || bitpos
% TYPE_ALIGN (type
) != 0)))
8665 enum machine_mode ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
8667 if (ext_mode
== BLKmode
)
8669 /* In this case, BITPOS must start at a byte boundary. */
8670 if (GET_CODE (op0
) != MEM
8671 || bitpos
% BITS_PER_UNIT
!= 0)
8674 op0
= change_address (op0
, VOIDmode
,
8675 plus_constant (XEXP (op0
, 0),
8676 bitpos
/ BITS_PER_UNIT
));
8680 rtx
new = assign_stack_temp (ext_mode
,
8681 bitsize
/ BITS_PER_UNIT
, 0);
8683 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
8684 unsignedp
, NULL_RTX
, ext_mode
,
8685 ext_mode
, alignment
,
8686 int_size_in_bytes (TREE_TYPE (tem
)));
8688 /* If the result is a record type and BITSIZE is narrower than
8689 the mode of OP0, an integral mode, and this is a big endian
8690 machine, we must put the field into the high-order bits. */
8691 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
8692 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
8693 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
8694 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
8695 size_int (GET_MODE_BITSIZE
8701 emit_move_insn (new, op0
);
8702 op0
= copy_rtx (new);
8703 PUT_MODE (op0
, BLKmode
);
8707 /* Get a reference to just this component. */
8708 op0
= change_address (op0
, mode1
,
8709 plus_constant (XEXP (op0
, 0),
8710 (bitpos
/ BITS_PER_UNIT
)));
8712 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
8714 /* Adjust the alignment in case the bit position is not
8715 a multiple of the alignment of the inner object. */
8716 while (bitpos
% alignment
!= 0)
8719 if (GET_CODE (XEXP (op0
, 0)) == REG
)
8720 mark_reg_pointer (XEXP (op0
, 0), alignment
);
8722 MEM_IN_STRUCT_P (op0
) = 1;
8723 MEM_VOLATILE_P (op0
) |= volatilep
;
8725 *palign
= alignment
;
8734 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8737 /* Return the tree node and offset if a given argument corresponds to
8738 a string constant. */
8741 string_constant (arg
, ptr_offset
)
8747 if (TREE_CODE (arg
) == ADDR_EXPR
8748 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8750 *ptr_offset
= integer_zero_node
;
8751 return TREE_OPERAND (arg
, 0);
8753 else if (TREE_CODE (arg
) == PLUS_EXPR
)
8755 tree arg0
= TREE_OPERAND (arg
, 0);
8756 tree arg1
= TREE_OPERAND (arg
, 1);
8761 if (TREE_CODE (arg0
) == ADDR_EXPR
8762 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
8765 return TREE_OPERAND (arg0
, 0);
8767 else if (TREE_CODE (arg1
) == ADDR_EXPR
8768 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
8771 return TREE_OPERAND (arg1
, 0);
8778 /* Expand code for a post- or pre- increment or decrement
8779 and return the RTX for the result.
8780 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8783 expand_increment (exp
, post
, ignore
)
8787 register rtx op0
, op1
;
8788 register rtx temp
, value
;
8789 register tree incremented
= TREE_OPERAND (exp
, 0);
8790 optab this_optab
= add_optab
;
8792 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
8793 int op0_is_copy
= 0;
8794 int single_insn
= 0;
8795 /* 1 means we can't store into OP0 directly,
8796 because it is a subreg narrower than a word,
8797 and we don't dare clobber the rest of the word. */
8800 /* Stabilize any component ref that might need to be
8801 evaluated more than once below. */
8803 || TREE_CODE (incremented
) == BIT_FIELD_REF
8804 || (TREE_CODE (incremented
) == COMPONENT_REF
8805 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
8806 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
8807 incremented
= stabilize_reference (incremented
);
8808 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8809 ones into save exprs so that they don't accidentally get evaluated
8810 more than once by the code below. */
8811 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
8812 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
8813 incremented
= save_expr (incremented
);
8815 /* Compute the operands as RTX.
8816 Note whether OP0 is the actual lvalue or a copy of it:
8817 I believe it is a copy iff it is a register or subreg
8818 and insns were generated in computing it. */
8820 temp
= get_last_insn ();
8821 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
8831 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
8834 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
8838 else if (GET_CODE (op0
) == SUBREG
8839 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
8841 /* We cannot increment this SUBREG in place. If we are
8842 post-incrementing, get a copy of the old value. Otherwise,
8843 just mark that we cannot increment in place. */
8845 op0
= copy_to_reg (op0
);
8850 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
8851 && temp
!= get_last_insn ());
8852 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
8853 EXPAND_MEMORY_USE_BAD
);
8855 /* Decide whether incrementing or decrementing. */
8856 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
8857 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
8858 this_optab
= sub_optab
;
8860 /* Convert decrement by a constant into a negative increment. */
8861 if (this_optab
== sub_optab
8862 && GET_CODE (op1
) == CONST_INT
)
8864 op1
= GEN_INT (- INTVAL (op1
));
8865 this_optab
= add_optab
;
8868 /* For a preincrement, see if we can do this with a single instruction. */
8871 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
8872 if (icode
!= (int) CODE_FOR_nothing
8873 /* Make sure that OP0 is valid for operands 0 and 1
8874 of the insn we want to queue. */
8875 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
8876 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
8877 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
8881 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8882 then we cannot just increment OP0. We must therefore contrive to
8883 increment the original value. Then, for postincrement, we can return
8884 OP0 since it is a copy of the old value. For preincrement, expand here
8885 unless we can do it with a single insn.
8887 Likewise if storing directly into OP0 would clobber high bits
8888 we need to preserve (bad_subreg). */
8889 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
8891 /* This is the easiest way to increment the value wherever it is.
8892 Problems with multiple evaluation of INCREMENTED are prevented
8893 because either (1) it is a component_ref or preincrement,
8894 in which case it was stabilized above, or (2) it is an array_ref
8895 with constant index in an array in a register, which is
8896 safe to reevaluate. */
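      /* Illustrative example (not from the original sources): for a
	 postincrement "b.f++" whose lvalue was stabilized above, NEWEXP is
	 effectively "b.f + 1" and the assignment below performs
	 "b.f = b.f + 1"; OP0, the copy holding the old value, is what gets
	 returned.  */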
8897 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
8898 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
8899 ? MINUS_EXPR
: PLUS_EXPR
),
8902 TREE_OPERAND (exp
, 1));
8904 while (TREE_CODE (incremented
) == NOP_EXPR
8905 || TREE_CODE (incremented
) == CONVERT_EXPR
)
8907 newexp
= convert (TREE_TYPE (incremented
), newexp
);
8908 incremented
= TREE_OPERAND (incremented
, 0);
8911 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
8912 return post
? op0
: temp
;
8917 /* We have a true reference to the value in OP0.
8918 If there is an insn to add or subtract in this mode, queue it.
8919 Queueing the increment insn avoids the register shuffling
8920 that often results if we must increment now and first save
8921 the old value for subsequent use. */
8923 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8924 op0
= stabilize (op0
);
8927 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
8928 if (icode
!= (int) CODE_FOR_nothing
8929 /* Make sure that OP0 is valid for operands 0 and 1
8930 of the insn we want to queue. */
8931 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
8932 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
8934 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
8935 op1
= force_reg (mode
, op1
);
8937 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
8939 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
8941 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
8942 ? force_reg (Pmode
, XEXP (op0
, 0))
8943 : copy_to_reg (XEXP (op0
, 0)));
8946 op0
= change_address (op0
, VOIDmode
, addr
);
8947 temp
= force_reg (GET_MODE (op0
), op0
);
8948 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
8949 op1
= force_reg (mode
, op1
);
8951 /* The increment queue is LIFO, thus we have to `queue'
8952 the instructions in reverse order. */
8953 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
8954 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
8959 /* Preincrement, or we can't increment with one simple insn. */
8961 /* Save a copy of the value before inc or dec, to return it later. */
8962 temp
= value
= copy_to_reg (op0
);
8964 /* Arrange to return the incremented value. */
8965 /* Copy the rtx because expand_binop will protect from the queue,
8966 and the results of that would be invalid for us to return
8967 if our caller does emit_queue before using our result. */
8968 temp
= copy_rtx (value
= op0
);
8970 /* Increment however we can. */
8971 op1
= expand_binop (mode
, this_optab
, value
, op1
,
8972 current_function_check_memory_usage
? NULL_RTX
: op0
,
8973 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
8974 /* Make sure the value is stored into OP0. */
8976 emit_move_insn (op0
, op1
);
8981 /* Expand all function calls contained within EXP, innermost ones first.
8982 But don't look within expressions that have sequence points.
8983 For each CALL_EXPR, record the rtx for its value
8984 in the CALL_EXPR_RTL field. */
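/* Illustrative example (not from the original sources): for an argument
   expression such as "f (x) + g (y)", both calls are expanded here ahead
   of the addition, so their CALL_EXPR_RTL fields already hold the result
   rtx by the time the PLUS_EXPR itself is expanded.  */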
8987 preexpand_calls (exp
)
8990 register int nops
, i
;
8991 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
8993 if (! do_preexpand_calls
)
8996 /* Only expressions and references can contain calls. */
8998 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
9001 switch (TREE_CODE (exp
))
9004 /* Do nothing if already expanded. */
9005 if (CALL_EXPR_RTL (exp
) != 0
9006 /* Do nothing if the call returns a variable-sized object. */
9007 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp
))) != INTEGER_CST
9008 /* Do nothing to built-in functions. */
9009 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9010 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9012 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
9015 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9020 case TRUTH_ANDIF_EXPR
:
9021 case TRUTH_ORIF_EXPR
:
9022 /* If we find one of these, then we can be sure
9023 the adjust will be done for it (since it makes jumps).
9024 Do it now, so that if this is inside an argument
9025 of a function, we don't get the stack adjustment
9026 after some other args have already been pushed. */
9027 do_pending_stack_adjust ();
9032 case WITH_CLEANUP_EXPR
:
9033 case CLEANUP_POINT_EXPR
:
9034 case TRY_CATCH_EXPR
:
9038 if (SAVE_EXPR_RTL (exp
) != 0)
9045 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
9046 for (i
= 0; i
< nops
; i
++)
9047 if (TREE_OPERAND (exp
, i
) != 0)
9049 if (TREE_CODE (exp
) == TARGET_EXPR
&& i
== 2)
9050 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9051 It doesn't happen before the call is made. */
9055 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9056 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
9058 preexpand_calls (TREE_OPERAND (exp
, i
));
9063 /* At the start of a function, record that we have no previously-pushed
9064 arguments waiting to be popped. */
9067 init_pending_stack_adjust ()
9069 pending_stack_adjust
= 0;
9072 /* When exiting from function, if safe, clear out any pending stack adjust
9073 so the adjustment won't get done.
9075 Note, if the current function calls alloca, then it must have a
9076 frame pointer regardless of the value of flag_omit_frame_pointer. */
9079 clear_pending_stack_adjust ()
9081 #ifdef EXIT_IGNORE_STACK
9083 && (! flag_omit_frame_pointer
|| current_function_calls_alloca
)
9084 && EXIT_IGNORE_STACK
9085 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
9086 && ! flag_inline_functions
)
9087 pending_stack_adjust
= 0;
9091 /* Pop any previously-pushed arguments that have not been popped yet. */
9094 do_pending_stack_adjust ()
9096 if (inhibit_defer_pop
== 0)
9098 if (pending_stack_adjust
!= 0)
9099 adjust_stack (GEN_INT (pending_stack_adjust
));
9100 pending_stack_adjust
= 0;
9104 /* Expand conditional expressions. */
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function
   and all the functions here.  */
9111 jumpifnot (exp
, label
)
9115 do_jump (exp
, label
, NULL_RTX
);
9118 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9125 do_jump (exp
, NULL_RTX
, label
);
9128 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9129 the result is zero, or IF_TRUE_LABEL if the result is one.
9130 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9131 meaning fall through in that case.
9133 do_jump always does any pending stack adjust except when it does not
9134 actually perform a jump. An example where there is no jump
9135 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9137 This function is responsible for optimizing cases such as
9138 &&, || and comparison operators in EXP. */
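/* Illustrative example (not from the original sources): for
   "if (a && b) f ();" do_jump receives the TRUTH_ANDIF_EXPR and emits a
   jump around the call as soon as A is zero, evaluating B only when A is
   nonzero, rather than materializing the boolean value of "a && b".  */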
9141 do_jump (exp
, if_false_label
, if_true_label
)
9143 rtx if_false_label
, if_true_label
;
9145 register enum tree_code code
= TREE_CODE (exp
);
9146 /* Some cases need to create a label to jump to
9147 in order to properly fall through.
9148 These cases set DROP_THROUGH_LABEL nonzero. */
9149 rtx drop_through_label
= 0;
9153 enum machine_mode mode
;
9155 #ifdef MAX_INTEGER_COMPUTATION_MODE
9156 check_max_integer_computation_mode (exp
);
9167 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9173 /* This is not true with #pragma weak */
9175 /* The address of something can never be zero. */
9177 emit_jump (if_true_label
);
9182 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9183 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9184 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
9189 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9190 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9192 case NON_LVALUE_EXPR
:
9193 case REFERENCE_EXPR
:
9198 /* These cannot change zero->non-zero or vice versa. */
9199 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9202 case WITH_RECORD_EXPR
:
9203 /* Put the object on the placeholder list, recurse through our first
9204 operand, and pop the list. */
9205 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9207 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9208 placeholder_list
= TREE_CHAIN (placeholder_list
);
9212 /* This is never less insns than evaluating the PLUS_EXPR followed by
9213 a test and can be longer if the test is eliminated. */
9215 /* Reduce to minus. */
9216 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9217 TREE_OPERAND (exp
, 0),
9218 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9219 TREE_OPERAND (exp
, 1))));
9220 /* Process as MINUS. */
9224 /* Non-zero iff operands of minus differ. */
9225 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9226 TREE_OPERAND (exp
, 0),
9227 TREE_OPERAND (exp
, 1)),
9228 NE
, NE
, if_false_label
, if_true_label
);
9232 /* If we are AND'ing with a small constant, do this comparison in the
9233 smallest type that fits. If the machine doesn't have comparisons
9234 that small, it will be converted back to the wider comparison.
9235 This helps if we are testing the sign bit of a narrower object.
9236 combine can't do this for us because it can't know whether a
9237 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
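      /* Illustrative example (not from the original sources): for
	 "if (x & 0x80)" with X an int, only bit 7 matters, so the test is
	 converted to a QImode comparison when the machine provides a
	 QImode compare insn, instead of comparing the full-width AND
	 result.  */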
9239 if (! SLOW_BYTE_ACCESS
9240 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9241 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9242 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
9243 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9244 && (type
= type_for_mode (mode
, 1)) != 0
9245 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9246 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9247 != CODE_FOR_nothing
))
9249 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9254 case TRUTH_NOT_EXPR
:
9255 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9258 case TRUTH_ANDIF_EXPR
:
9259 if (if_false_label
== 0)
9260 if_false_label
= drop_through_label
= gen_label_rtx ();
9261 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9262 start_cleanup_deferral ();
9263 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9264 end_cleanup_deferral ();
9267 case TRUTH_ORIF_EXPR
:
9268 if (if_true_label
== 0)
9269 if_true_label
= drop_through_label
= gen_label_rtx ();
9270 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9271 start_cleanup_deferral ();
9272 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9273 end_cleanup_deferral ();
9278 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9279 preserve_temp_slots (NULL_RTX
);
9283 do_pending_stack_adjust ();
9284 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9291 int bitsize
, bitpos
, unsignedp
;
9292 enum machine_mode mode
;
9298 /* Get description of this reference. We don't actually care
9299 about the underlying object here. */
9300 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
9301 &mode
, &unsignedp
, &volatilep
,
9304 type
= type_for_size (bitsize
, unsignedp
);
9305 if (! SLOW_BYTE_ACCESS
9306 && type
!= 0 && bitsize
>= 0
9307 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9308 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9309 != CODE_FOR_nothing
))
9311 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9318 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9319 if (integer_onep (TREE_OPERAND (exp
, 1))
9320 && integer_zerop (TREE_OPERAND (exp
, 2)))
9321 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9323 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9324 && integer_onep (TREE_OPERAND (exp
, 2)))
9325 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9329 register rtx label1
= gen_label_rtx ();
9330 drop_through_label
= gen_label_rtx ();
9332 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9334 start_cleanup_deferral ();
9335 /* Now the THEN-expression. */
9336 do_jump (TREE_OPERAND (exp
, 1),
9337 if_false_label
? if_false_label
: drop_through_label
,
9338 if_true_label
? if_true_label
: drop_through_label
);
9339 /* In case the do_jump just above never jumps. */
9340 do_pending_stack_adjust ();
9341 emit_label (label1
);
9343 /* Now the ELSE-expression. */
9344 do_jump (TREE_OPERAND (exp
, 2),
9345 if_false_label
? if_false_label
: drop_through_label
,
9346 if_true_label
? if_true_label
: drop_through_label
);
9347 end_cleanup_deferral ();
9353 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9355 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9356 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9358 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9359 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9362 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9363 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9364 fold (build1 (REALPART_EXPR
,
9365 TREE_TYPE (inner_type
),
9367 fold (build1 (REALPART_EXPR
,
9368 TREE_TYPE (inner_type
),
9370 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9371 fold (build1 (IMAGPART_EXPR
,
9372 TREE_TYPE (inner_type
),
9374 fold (build1 (IMAGPART_EXPR
,
9375 TREE_TYPE (inner_type
),
9377 if_false_label
, if_true_label
);
9380 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9381 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9383 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9384 && !can_compare_p (TYPE_MODE (inner_type
), ccp_jump
))
9385 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9387 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9393 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9395 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9396 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9398 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9399 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9402 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9403 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9404 fold (build1 (REALPART_EXPR
,
9405 TREE_TYPE (inner_type
),
9407 fold (build1 (REALPART_EXPR
,
9408 TREE_TYPE (inner_type
),
9410 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9411 fold (build1 (IMAGPART_EXPR
,
9412 TREE_TYPE (inner_type
),
9414 fold (build1 (IMAGPART_EXPR
,
9415 TREE_TYPE (inner_type
),
9417 if_false_label
, if_true_label
);
9420 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9421 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9423 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9424 && !can_compare_p (TYPE_MODE (inner_type
), ccp_jump
))
9425 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9427 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9432 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9433 if (GET_MODE_CLASS (mode
) == MODE_INT
9434 && ! can_compare_p (mode
, ccp_jump
))
9435 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9437 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9441 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9442 if (GET_MODE_CLASS (mode
) == MODE_INT
9443 && ! can_compare_p (mode
, ccp_jump
))
9444 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9446 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9450 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9451 if (GET_MODE_CLASS (mode
) == MODE_INT
9452 && ! can_compare_p (mode
, ccp_jump
))
9453 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9455 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9459 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9460 if (GET_MODE_CLASS (mode
) == MODE_INT
9461 && ! can_compare_p (mode
, ccp_jump
))
9462 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9464 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9469 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
9474 /* Copy to register to avoid generating bad insns by cse
9475 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9476 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9477 temp
= copy_to_reg (temp
);
9479 do_pending_stack_adjust ();
9480 /* Do any postincrements in the expression that was tested. */
9483 if (GET_CODE (temp
) == CONST_INT
|| GET_CODE (temp
) == LABEL_REF
)
9485 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9489 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9490 && ! can_compare_p (GET_MODE (temp
), ccp_jump
))
9491 /* Note swapping the labels gives us not-equal. */
9492 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9493 else if (GET_MODE (temp
) != VOIDmode
)
9494 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9495 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9496 GET_MODE (temp
), NULL_RTX
, 0,
9497 if_false_label
, if_true_label
);
9502 if (drop_through_label
)
9504 /* If do_jump produces code that might be jumped around,
9505 do any stack adjusts from that code, before the place
9506 where control merges in. */
9507 do_pending_stack_adjust ();
9508 emit_label (drop_through_label
);
9512 /* Given a comparison expression EXP for values too wide to be compared
9513 with one insn, test the comparison and jump to the appropriate label.
9514 The code of EXP is ignored; we always test GT if SWAP is 0,
9515 and LT if SWAP is 1. */
9518 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
9521 rtx if_false_label
, if_true_label
;
9523 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
9524 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
9525 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9526 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9528 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
9531 /* Compare OP0 with OP1, word at a time, in mode MODE.
9532 UNSIGNEDP says to do unsigned comparison.
9533 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9536 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
9537 enum machine_mode mode
;
9540 rtx if_false_label
, if_true_label
;
9542 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
9543 rtx drop_through_label
= 0;
9546 if (! if_true_label
|| ! if_false_label
)
9547 drop_through_label
= gen_label_rtx ();
9548 if (! if_true_label
)
9549 if_true_label
= drop_through_label
;
9550 if (! if_false_label
)
9551 if_false_label
= drop_through_label
;
9553 /* Compare a word at a time, high order first. */
9554 for (i
= 0; i
< nwords
; i
++)
9556 rtx op0_word
, op1_word
;
9558 if (WORDS_BIG_ENDIAN
)
9560 op0_word
= operand_subword_force (op0
, i
, mode
);
9561 op1_word
= operand_subword_force (op1
, i
, mode
);
9565 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
9566 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
9569 /* All but high-order word must be compared as unsigned. */
9570 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
9571 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
, 0,
9572 NULL_RTX
, if_true_label
);
9574 /* Consider lower words only if these are equal. */
9575 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
9576 NULL_RTX
, 0, NULL_RTX
, if_false_label
);
9580 emit_jump (if_false_label
);
9581 if (drop_through_label
)
9582 emit_label (drop_through_label
);
9585 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9586 with one insn, test the comparison and jump to the appropriate label. */
9589 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
9591 rtx if_false_label
, if_true_label
;
9593 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
9594 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9595 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9596 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
9598 rtx drop_through_label
= 0;
9600 if (! if_false_label
)
9601 drop_through_label
= if_false_label
= gen_label_rtx ();
9603 for (i
= 0; i
< nwords
; i
++)
9604 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
9605 operand_subword_force (op1
, i
, mode
),
9606 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9607 word_mode
, NULL_RTX
, 0, if_false_label
,
9611 emit_jump (if_true_label
);
9612 if (drop_through_label
)
9613 emit_label (drop_through_label
);
9616 /* Jump according to whether OP0 is 0.
9617 We assume that OP0 has an integer mode that is too wide
9618 for the available compare insns. */
9621 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
9623 rtx if_false_label
, if_true_label
;
9625 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
9628 rtx drop_through_label
= 0;
9630 /* The fastest way of doing this comparison on almost any machine is to
9631 "or" all the words and compare the result. If all have to be loaded
9632 from memory and this is a very wide item, it's possible this may
9633 be slower, but that's highly unlikely. */
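  /* Illustrative example (not from the original sources): for a DImode
     value on a 32-bit target this ORs the two SImode subwords into a
     single pseudo and compares that pseudo against zero once, instead of
     emitting two separate word comparisons.  */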
9635 part
= gen_reg_rtx (word_mode
);
9636 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
9637 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
9638 part
= expand_binop (word_mode
, ior_optab
, part
,
9639 operand_subword_force (op0
, i
, GET_MODE (op0
)),
9640 part
, 1, OPTAB_WIDEN
);
9644 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
9645 NULL_RTX
, 0, if_false_label
, if_true_label
);
9650 /* If we couldn't do the "or" simply, do this with a series of compares. */
9651 if (! if_false_label
)
9652 drop_through_label
= if_false_label
= gen_label_rtx ();
9654 for (i
= 0; i
< nwords
; i
++)
9655 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
9656 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0,
9657 if_false_label
, NULL_RTX
);
9660 emit_jump (if_true_label
);
9662 if (drop_through_label
)
9663 emit_label (drop_through_label
);
9666 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9667 (including code to compute the values to be compared)
9668 and set (CC0) according to the result.
9669 The decision as to signed or unsigned comparison must be made by the caller.
9671 We force a stack adjustment unless there are currently
9672 things pushed on the stack that aren't yet used.
   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.
9677 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9678 size of MODE should be used. */
9681 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
9682 register rtx op0
, op1
;
9685 enum machine_mode mode
;
9691 /* If one operand is constant, make it the second one. Only do this
9692 if the other operand is not constant as well. */
9694 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9695 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
9700 code
= swap_condition (code
);
9705 op0
= force_not_mem (op0
);
9706 op1
= force_not_mem (op1
);
9709 do_pending_stack_adjust ();
9711 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9712 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9716 /* There's no need to do this now that combine.c can eliminate lots of
9717 sign extensions. This can be less efficient in certain cases on other
9720 /* If this is a signed equality comparison, we can do it as an
9721 unsigned comparison since zero-extension is cheaper than sign
9722 extension and comparisons with zero are done as unsigned. This is
9723 the case even on machines that can do fast sign extension, since
9724 zero-extension is easier to combine with other operations than
9725 sign-extension is. If we are comparing against a constant, we must
9726 convert it to what it would look like unsigned. */
9727 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9728 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
9730 if (GET_CODE (op1
) == CONST_INT
9731 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9732 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
9737 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
9739 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
9742 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9743 The decision as to signed or unsigned comparison must be made by the caller.
   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.
9748 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9749 size of MODE should be used. */
9752 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
, size
, align
,
9753 if_false_label
, if_true_label
)
9754 register rtx op0
, op1
;
9757 enum machine_mode mode
;
9760 rtx if_false_label
, if_true_label
;
9763 int dummy_true_label
= 0;
  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
9767 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
9769 if_true_label
= if_false_label
;
9771 code
= reverse_condition (code
);
9774 /* If one operand is constant, make it the second one. Only do this
9775 if the other operand is not constant as well. */
9777 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
9778 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
9783 code
= swap_condition (code
);
9788 op0
= force_not_mem (op0
);
9789 op1
= force_not_mem (op1
);
9792 do_pending_stack_adjust ();
9794 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
9795 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
9797 if (tem
== const_true_rtx
)
9800 emit_jump (if_true_label
);
9805 emit_jump (if_false_label
);
9811 /* There's no need to do this now that combine.c can eliminate lots of
9812 sign extensions. This can be less efficient in certain cases on other
9815 /* If this is a signed equality comparison, we can do it as an
9816 unsigned comparison since zero-extension is cheaper than sign
9817 extension and comparisons with zero are done as unsigned. This is
9818 the case even on machines that can do fast sign extension, since
9819 zero-extension is easier to combine with other operations than
9820 sign-extension is. If we are comparing against a constant, we must
9821 convert it to what it would look like unsigned. */
9822 if ((code
== EQ
|| code
== NE
) && ! unsignedp
9823 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
9825 if (GET_CODE (op1
) == CONST_INT
9826 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
9827 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
9832 if (! if_true_label
)
9834 dummy_true_label
= 1;
9835 if_true_label
= gen_label_rtx ();
9838 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
, align
,
9842 emit_jump (if_false_label
);
9843 if (dummy_true_label
)
9844 emit_label (if_true_label
);
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
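/* For instance (illustrative only): for a less-than comparison the
   caller passes LT as SIGNED_CODE and LTU as UNSIGNED_CODE, and the
   code below selects LTU when the operands' type is unsigned.  */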
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1) / BITS_PER_UNIT,
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
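/* As an illustration (not from the original comments): for something
   like "r = (a < b)" on a machine with no usable scc pattern, the
   fallback at the end of the function emits roughly

        r = 1;  if (a < b) goto L;  r = 0;  L:

   with the two constants exchanged when the result must be inverted.  */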
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
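  /* For example (illustrative only): a signed "x < 1" is rewritten
     below as "x <= 0", and a signed "x > -1" as "x >= 0", so the
     zero-based forms are what the later tests see.  */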
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
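  /* For instance (illustrative only): "(x & 8) != 0" becomes
     "(x >> 3) & 1", and "(x & 8) == 0" additionally xors the result
     with 1 to flip it.  */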
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
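      /* E.g. (illustrative only): testing bit 1 of "y >> 3" is the same
         as testing bit 4 of Y, so BITNUM becomes 4 and INNER becomes Y,
         provided bit 4 still lies within the type's precision.  */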
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );
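      /* Note (illustrative): when BITNUM is the sign bit
         (TYPE_PRECISION (type) - 1), a logical right shift leaves just
         0 or 1 in the result, so the AND below can be omitted; an
         arithmetic shift would leave 0 or -1 instead.  */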
      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode, ccp_store_flag))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
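  /* For example (illustrative only): for cases 3..7, RANGE is 4 and
     INDEX already has 3 subtracted.  An original value of 2 yields
     (unsigned) -1, which compares greater than 4 just as 9 (yielding 6)
     does, so a single GTU test rejects both out-of-range directions.  */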
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */