1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
38 #ifdef EXTRA_CONSTRAINT
39 /* If EXTRA_CONSTRAINT is defined, then the 'S'
40 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
41 asm statements that need 'S' for class SIREG will break. */
42 error EXTRA_CONSTRAINT conflicts with S constraint letter
43 /* The previous line used to be #error, but some compilers barf
44 even if the conditional was untrue. */
47 #ifndef CHECK_STACK_LIMIT
48 #define CHECK_STACK_LIMIT -1
51 /* Type of an operand for ix86_{binary,unary}_operator_ok */
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost
= { /* 386 specific costs */
61 1, /* cost of an add instruction */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost
= { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost
= {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 4, /* variable shift costs */
84 1, /* constant shift costs */
85 11, /* cost of starting a multiply */
86 0, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs pentiumpro_cost
= {
91 1, /* cost of an add instruction */
92 1, /* cost of a lea instruction */
93 3, /* variable shift costs */
94 1, /* constant shift costs */
95 4, /* cost of starting a multiply */
96 0, /* cost of multiply per each bit set */
97 17 /* cost of a divide/mod */
100 struct processor_costs
*ix86_cost
= &pentium_cost
;
/* A memory reference in mode MODE based at the frame pointer.  */
#define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))
104 extern FILE *asm_out_file
;
105 extern char *strcat ();
107 static void ix86_epilogue
PROTO((int));
108 static void ix86_prologue
PROTO((int));
110 char *singlemove_string ();
111 char *output_move_const_single ();
112 char *output_fp_cc0_set ();
114 char *hi_reg_name
[] = HI_REGISTER_NAMES
;
115 char *qi_reg_name
[] = QI_REGISTER_NAMES
;
116 char *qi_high_reg_name
[] = QI_HIGH_REGISTER_NAMES
;
118 /* Array of the smallest class containing reg number REGNO, indexed by
119 REGNO. Used by REGNO_REG_CLASS in i386.h. */
121 enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
124 AREG
, DREG
, CREG
, BREG
,
126 SIREG
, DIREG
, INDEX_REGS
, GENERAL_REGS
,
128 FP_TOP_REG
, FP_SECOND_REG
, FLOAT_REGS
, FLOAT_REGS
,
129 FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
, FLOAT_REGS
,
134 /* Test and compare insns in i386.md store the information needed to
135 generate branch and scc insns here. */
137 struct rtx_def
*i386_compare_op0
= NULL_RTX
;
138 struct rtx_def
*i386_compare_op1
= NULL_RTX
;
139 struct rtx_def
*(*i386_compare_gen
)(), *(*i386_compare_gen_eq
)();
141 /* which cpu are we scheduling for */
142 enum processor_type ix86_cpu
;
144 /* which instruction set architecture to use. */
/* Strings to hold which cpu and instruction set architecture to use.  */
char *ix86_cpu_string;		/* for -mcpu=<xxx> */
char *ix86_arch_string;		/* for -march=<xxx> */
151 /* Register allocation order */
152 char *i386_reg_alloc_order
;
153 static char regs_allocated
[FIRST_PSEUDO_REGISTER
];
/* # of registers to use to pass arguments (-mregparm=).  */
char *i386_regparm_string;
158 /* i386_regparm_string as a number */
/* Alignment to use for loops and jumps:  */

/* Power of two alignment for loops (-malign-loops=).  */
char *i386_align_loops_string;

/* Power of two alignment for non-loop jumps (-malign-jumps=).  */
char *i386_align_jumps_string;

/* Branch cost, values 1-5: see jump.c (-mbranch-cost=).  */
int i386_branch_cost;
char *i386_branch_cost_string;

/* Power of two alignment for functions (-malign-functions=).  */
int i386_align_funcs;
char *i386_align_funcs_string;

/* Power of two alignment for loops.  */
int i386_align_loops;

/* Power of two alignment for non-loop jumps.  */
int i386_align_jumps;
183 /* Sometimes certain combinations of command options do not make
184 sense on a particular target machine. You can define a macro
185 `OVERRIDE_OPTIONS' to take account of this. This macro, if
186 defined, is executed once just after all the command options have
189 Don't use this macro to turn on various extra optimizations for
190 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
201 char *name
; /* Canonical processor name. */
202 enum processor_type processor
; /* Processor type enum value. */
203 struct processor_costs
*cost
; /* Processor costs */
204 int target_enable
; /* Target flags to enable. */
205 int target_disable
; /* Target flags to disable. */
206 } processor_target_table
[]
207 = {{PROCESSOR_I386_STRING
, PROCESSOR_I386
, &i386_cost
, 0, 0},
208 {PROCESSOR_I486_STRING
, PROCESSOR_I486
, &i486_cost
, 0, 0},
209 {PROCESSOR_I586_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
210 {PROCESSOR_PENTIUM_STRING
, PROCESSOR_PENTIUM
, &pentium_cost
, 0, 0},
211 {PROCESSOR_I686_STRING
, PROCESSOR_PENTIUMPRO
, &pentiumpro_cost
,
213 {PROCESSOR_PENTIUMPRO_STRING
, PROCESSOR_PENTIUMPRO
,
214 &pentiumpro_cost
, 0, 0}};
216 int ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
218 #ifdef SUBTARGET_OVERRIDE_OPTIONS
219 SUBTARGET_OVERRIDE_OPTIONS
;
222 /* Validate registers in register allocation order. */
223 if (i386_reg_alloc_order
)
225 for (i
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
229 case 'a': regno
= 0; break;
230 case 'd': regno
= 1; break;
231 case 'c': regno
= 2; break;
232 case 'b': regno
= 3; break;
233 case 'S': regno
= 4; break;
234 case 'D': regno
= 5; break;
235 case 'B': regno
= 6; break;
237 default: fatal ("Register '%c' is unknown", ch
);
240 if (regs_allocated
[regno
])
241 fatal ("Register '%c' already specified in allocation order", ch
);
243 regs_allocated
[regno
] = 1;
247 if (ix86_arch_string
== 0)
249 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
250 if (ix86_cpu_string
== 0)
251 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
254 for (i
= 0; i
< ptt_size
; i
++)
255 if (! strcmp (ix86_arch_string
, processor_target_table
[i
].name
))
257 ix86_arch
= processor_target_table
[i
].processor
;
258 if (ix86_cpu_string
== 0)
259 ix86_cpu_string
= processor_target_table
[i
].name
;
265 error ("bad value (%s) for -march= switch", ix86_arch_string
);
266 ix86_arch_string
= PROCESSOR_PENTIUM_STRING
;
267 ix86_arch
= PROCESSOR_DEFAULT
;
270 if (ix86_cpu_string
== 0)
271 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
273 for (j
= 0; j
< ptt_size
; j
++)
274 if (! strcmp (ix86_cpu_string
, processor_target_table
[j
].name
))
276 ix86_cpu
= processor_target_table
[j
].processor
;
277 ix86_cost
= processor_target_table
[j
].cost
;
278 if (i
> j
&& (int) ix86_arch
>= (int) PROCESSOR_PENTIUMPRO
)
279 error ("-mcpu=%s does not support -march=%s",
280 ix86_cpu_string
, ix86_arch_string
);
282 target_flags
|= processor_target_table
[j
].target_enable
;
283 target_flags
&= ~processor_target_table
[j
].target_disable
;
289 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string
);
290 ix86_cpu_string
= PROCESSOR_DEFAULT_STRING
;
291 ix86_cpu
= PROCESSOR_DEFAULT
;
294 /* Validate -mregparm= value. */
295 if (i386_regparm_string
)
297 i386_regparm
= atoi (i386_regparm_string
);
298 if (i386_regparm
< 0 || i386_regparm
> REGPARM_MAX
)
299 fatal ("-mregparm=%d is not between 0 and %d",
300 i386_regparm
, REGPARM_MAX
);
303 /* The 486 suffers more from non-aligned cache line fills, and the
304 larger code size results in a larger cache foot-print and more misses.
305 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
307 def_align
= (TARGET_486
) ? 4 : 2;
309 /* Validate -malign-loops= value, or provide default. */
310 if (i386_align_loops_string
)
312 i386_align_loops
= atoi (i386_align_loops_string
);
313 if (i386_align_loops
< 0 || i386_align_loops
> MAX_CODE_ALIGN
)
314 fatal ("-malign-loops=%d is not between 0 and %d",
315 i386_align_loops
, MAX_CODE_ALIGN
);
318 i386_align_loops
= 2;
320 /* Validate -malign-jumps= value, or provide default. */
321 if (i386_align_jumps_string
)
323 i386_align_jumps
= atoi (i386_align_jumps_string
);
324 if (i386_align_jumps
< 0 || i386_align_jumps
> MAX_CODE_ALIGN
)
325 fatal ("-malign-jumps=%d is not between 0 and %d",
326 i386_align_jumps
, MAX_CODE_ALIGN
);
329 i386_align_jumps
= def_align
;
331 /* Validate -malign-functions= value, or provide default. */
332 if (i386_align_funcs_string
)
334 i386_align_funcs
= atoi (i386_align_funcs_string
);
335 if (i386_align_funcs
< 0 || i386_align_funcs
> MAX_CODE_ALIGN
)
336 fatal ("-malign-functions=%d is not between 0 and %d",
337 i386_align_funcs
, MAX_CODE_ALIGN
);
340 i386_align_funcs
= def_align
;
342 /* Validate -mbranch-cost= value, or provide default. */
343 if (i386_branch_cost_string
)
345 i386_branch_cost
= atoi (i386_branch_cost_string
);
346 if (i386_branch_cost
< 0 || i386_branch_cost
> 5)
347 fatal ("-mbranch-cost=%d is not between 0 and 5",
351 i386_branch_cost
= 1;
353 /* Keep nonleaf frame pointers. */
354 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
355 flag_omit_frame_pointer
= 1;
358 /* A C statement (sans semicolon) to choose the order in which to
359 allocate hard registers for pseudo-registers local to a basic
362 Store the desired register order in the array `reg_alloc_order'.
363 Element 0 should be the register to allocate first; element 1, the
364 next register; and so on.
366 The macro body should not assume anything about the contents of
367 `reg_alloc_order' before execution of the macro.
369 On most machines, it is not necessary to define this macro. */
372 order_regs_for_local_alloc ()
374 int i
, ch
, order
, regno
;
376 /* User specified the register allocation order. */
378 if (i386_reg_alloc_order
)
380 for (i
= order
= 0; (ch
= i386_reg_alloc_order
[i
]) != '\0'; i
++)
384 case 'a': regno
= 0; break;
385 case 'd': regno
= 1; break;
386 case 'c': regno
= 2; break;
387 case 'b': regno
= 3; break;
388 case 'S': regno
= 4; break;
389 case 'D': regno
= 5; break;
390 case 'B': regno
= 6; break;
393 reg_alloc_order
[order
++] = regno
;
396 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
398 if (! regs_allocated
[i
])
399 reg_alloc_order
[order
++] = i
;
403 /* If user did not specify a register allocation order, use natural order. */
406 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
407 reg_alloc_order
[i
] = i
;
412 optimization_options (level
)
415 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
416 make the problem with not enough registers even worse. */
417 #ifdef INSN_SCHEDULING
419 flag_schedule_insns
= 0;
423 /* Sign-extend a 16-bit constant */
426 i386_sext16_if_const (op
)
429 if (GET_CODE (op
) == CONST_INT
)
431 HOST_WIDE_INT val
= INTVAL (op
);
432 HOST_WIDE_INT sext_val
;
434 sext_val
= val
| ~0xffff;
436 sext_val
= val
& 0xffff;
438 op
= GEN_INT (sext_val
);
443 /* Return nonzero if the rtx is aligned */
446 i386_aligned_reg_p (regno
)
449 return (regno
== STACK_POINTER_REGNUM
450 || (! flag_omit_frame_pointer
&& regno
== FRAME_POINTER_REGNUM
));
457 /* Registers and immediate operands are always "aligned". */
458 if (GET_CODE (op
) != MEM
)
461 /* Don't even try to do any aligned optimizations with volatiles. */
462 if (MEM_VOLATILE_P (op
))
465 /* Get address of memory operand. */
468 switch (GET_CODE (op
))
475 /* Match "reg + offset" */
477 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
)
479 if (INTVAL (XEXP (op
, 1)) & 3)
483 if (GET_CODE (op
) != REG
)
486 /* ... fall through ... */
489 return i386_aligned_reg_p (REGNO (op
));
495 /* Return nonzero if INSN looks like it won't compute useful cc bits
496 as a side effect. This information is only a hint. */
499 i386_cc_probably_useless_p (insn
)
502 return ! next_cc0_user (insn
);
505 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
506 attribute for DECL. The attributes in ATTRIBUTES have previously been
510 i386_valid_decl_attribute_p (decl
, attributes
, identifier
, args
)
519 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
520 attribute for TYPE. The attributes in ATTRIBUTES have previously been
524 i386_valid_type_attribute_p (type
, attributes
, identifier
, args
)
530 if (TREE_CODE (type
) != FUNCTION_TYPE
531 && TREE_CODE (type
) != FIELD_DECL
532 && TREE_CODE (type
) != TYPE_DECL
)
535 /* Stdcall attribute says callee is responsible for popping arguments
536 if they are not variable. */
537 if (is_attribute_p ("stdcall", identifier
))
538 return (args
== NULL_TREE
);
540 /* Cdecl attribute says the callee is a normal C declaration. */
541 if (is_attribute_p ("cdecl", identifier
))
542 return (args
== NULL_TREE
);
544 /* Regparm attribute specifies how many integer arguments are to be
545 passed in registers. */
546 if (is_attribute_p ("regparm", identifier
))
550 if (! args
|| TREE_CODE (args
) != TREE_LIST
551 || TREE_CHAIN (args
) != NULL_TREE
552 || TREE_VALUE (args
) == NULL_TREE
)
555 cst
= TREE_VALUE (args
);
556 if (TREE_CODE (cst
) != INTEGER_CST
)
559 if (TREE_INT_CST_HIGH (cst
) != 0
560 || TREE_INT_CST_LOW (cst
) < 0
561 || TREE_INT_CST_LOW (cst
) > REGPARM_MAX
)
570 /* Return 0 if the attributes for two types are incompatible, 1 if they
571 are compatible, and 2 if they are nearly compatible (which causes a
572 warning to be generated). */
575 i386_comp_type_attributes (type1
, type2
)
583 /* Value is the number of bytes of arguments automatically
584 popped when returning from a subroutine call.
585 FUNDECL is the declaration node of the function (as a tree),
586 FUNTYPE is the data type of the function (as a tree),
587 or for a library call it is an identifier node for the subroutine name.
588 SIZE is the number of bytes of arguments passed on the stack.
590 On the 80386, the RTD insn may be used to pop them if the number
591 of args is fixed, but if the number is variable then the caller
592 must pop them all. RTD can't be used for library calls now
593 because the library is compiled with the Unix compiler.
594 Use of RTD is a selectable option, since it is incompatible with
595 standard Unix calling sequences. If the option is not selected,
596 the caller must always pop the args.
598 The attribute stdcall is equivalent to RTD on a per module basis. */
601 i386_return_pops_args (fundecl
, funtype
, size
)
606 int rtd
= TARGET_RTD
&& (!fundecl
|| TREE_CODE (fundecl
) != IDENTIFIER_NODE
);
608 /* Cdecl functions override -mrtd, and never pop the stack. */
609 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype
))) {
611 /* Stdcall functions will pop the stack if not variable args. */
612 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype
)))
616 && (TYPE_ARG_TYPES (funtype
) == NULL_TREE
617 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype
)))
622 /* Lose any fake structure return argument. */
623 if (aggregate_value_p (TREE_TYPE (funtype
)))
624 return GET_MODE_SIZE (Pmode
);
630 /* Argument support functions. */
632 /* Initialize a variable CUM of type CUMULATIVE_ARGS
633 for a call to a function whose data type is FNTYPE.
634 For a library call, FNTYPE is 0. */
637 init_cumulative_args (cum
, fntype
, libname
)
638 CUMULATIVE_ARGS
*cum
; /* Argument info to initialize */
639 tree fntype
; /* tree ptr for function decl */
640 rtx libname
; /* SYMBOL_REF of library name or 0 */
642 static CUMULATIVE_ARGS zero_cum
;
643 tree param
, next_param
;
645 if (TARGET_DEBUG_ARG
)
647 fprintf (stderr
, "\ninit_cumulative_args (");
649 fprintf (stderr
, "fntype code = %s, ret code = %s",
650 tree_code_name
[(int) TREE_CODE (fntype
)],
651 tree_code_name
[(int) TREE_CODE (TREE_TYPE (fntype
))]);
653 fprintf (stderr
, "no fntype");
656 fprintf (stderr
, ", libname = %s", XSTR (libname
, 0));
661 /* Set up the number of registers to use for passing arguments. */
662 cum
->nregs
= i386_regparm
;
665 tree attr
= lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype
));
668 cum
->nregs
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr
)));
671 /* Determine if this function has variable arguments. This is
672 indicated by the last argument being 'void_type_mode' if there
673 are no variable arguments. If there are variable arguments, then
674 we won't pass anything in registers */
678 for (param
= (fntype
) ? TYPE_ARG_TYPES (fntype
) : 0;
679 param
!= 0; param
= next_param
)
681 next_param
= TREE_CHAIN (param
);
682 if (next_param
== 0 && TREE_VALUE (param
) != void_type_node
)
687 if (TARGET_DEBUG_ARG
)
688 fprintf (stderr
, ", nregs=%d )\n", cum
->nregs
);
693 /* Update the data in CUM to advance over an argument
694 of mode MODE and data type TYPE.
695 (TYPE is null for libcalls where that information may not be available.) */
698 function_arg_advance (cum
, mode
, type
, named
)
699 CUMULATIVE_ARGS
*cum
; /* current arg information */
700 enum machine_mode mode
; /* current arg mode */
701 tree type
; /* type of the argument or 0 if lib support */
702 int named
; /* whether or not the argument was named */
705 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
706 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
708 if (TARGET_DEBUG_ARG
)
710 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
711 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
726 /* Define where to put the arguments to a function.
727 Value is zero to push the argument on the stack,
728 or a hard register in which to store the argument.
730 MODE is the argument's machine mode.
731 TYPE is the data type of the argument (as a tree).
732 This is null for libcalls where that information may
734 CUM is a variable of type CUMULATIVE_ARGS which gives info about
735 the preceding args and about the function being called.
736 NAMED is nonzero if this argument is a named parameter
737 (otherwise it is an extra parameter matching an ellipsis). */
740 function_arg (cum
, mode
, type
, named
)
741 CUMULATIVE_ARGS
*cum
; /* current arg information */
742 enum machine_mode mode
; /* current arg mode */
743 tree type
; /* type of the argument or 0 if lib support */
744 int named
; /* != 0 for normal args, == 0 for ... args */
748 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
749 int words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
753 /* For now, pass fp/complex values on the stack. */
762 if (words
<= cum
->nregs
)
763 ret
= gen_rtx_REG (mode
, cum
->regno
);
767 if (TARGET_DEBUG_ARG
)
770 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
771 words
, cum
->words
, cum
->nregs
, GET_MODE_NAME (mode
), named
);
774 fprintf (stderr
, ", reg=%%e%s", reg_names
[ REGNO(ret
) ]);
776 fprintf (stderr
, ", stack");
778 fprintf (stderr
, " )\n");
784 /* For an arg passed partly in registers and partly in memory,
785 this is the number of registers used.
786 For args passed entirely in registers or entirely in memory, zero. */
789 function_arg_partial_nregs (cum
, mode
, type
, named
)
790 CUMULATIVE_ARGS
*cum
; /* current arg information */
791 enum machine_mode mode
; /* current arg mode */
792 tree type
; /* type of the argument or 0 if lib support */
793 int named
; /* != 0 for normal args, == 0 for ... args */
798 /* Output an insn whose source is a 386 integer register. SRC is the
799 rtx for the register, and TEMPLATE is the op-code template. SRC may
800 be either SImode or DImode.
802 The template will be output with operands[0] as SRC, and operands[1]
803 as a pointer to the top of the 386 stack. So a call from floatsidf2
804 would look like this:
806 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
808 where %z0 corresponds to the caller's operands[1], and is used to
809 emit the proper size suffix.
811 ??? Extend this to handle HImode - a 387 can load and store HImode
815 output_op_from_reg (src
, template)
820 int size
= GET_MODE_SIZE (GET_MODE (src
));
823 xops
[1] = AT_SP (Pmode
);
824 xops
[2] = GEN_INT (size
);
825 xops
[3] = stack_pointer_rtx
;
827 if (size
> UNITS_PER_WORD
)
831 if (size
> 2 * UNITS_PER_WORD
)
833 high
= gen_rtx_REG (SImode
, REGNO (src
) + 2);
834 output_asm_insn (AS1 (push
%L0
,%0), &high
);
837 high
= gen_rtx_REG (SImode
, REGNO (src
) + 1);
838 output_asm_insn (AS1 (push
%L0
,%0), &high
);
841 output_asm_insn (AS1 (push
%L0
,%0), &src
);
842 output_asm_insn (template, xops
);
843 output_asm_insn (AS2 (add
%L3
,%2,%3), xops
);
846 /* Output an insn to pop an value from the 387 top-of-stack to 386
847 register DEST. The 387 register stack is popped if DIES is true. If
848 the mode of DEST is an integer mode, a `fist' integer store is done,
849 otherwise a `fst' float store is done. */
852 output_to_reg (dest
, dies
, scratch_mem
)
858 int size
= GET_MODE_SIZE (GET_MODE (dest
));
861 xops
[0] = AT_SP (Pmode
);
863 xops
[0] = scratch_mem
;
865 xops
[1] = stack_pointer_rtx
;
866 xops
[2] = GEN_INT (size
);
870 output_asm_insn (AS2 (sub
%L1
,%2,%1), xops
);
872 if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_INT
)
875 output_asm_insn (AS1 (fistp
%z3
,%y0
), xops
);
877 output_asm_insn (AS1 (fist
%z3
,%y0
), xops
);
880 else if (GET_MODE_CLASS (GET_MODE (dest
)) == MODE_FLOAT
)
883 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
886 if (GET_MODE (dest
) == XFmode
)
888 output_asm_insn (AS1 (fstp
%z3
,%y0
), xops
);
889 output_asm_insn (AS1 (fld
%z3
,%y0
), xops
);
892 output_asm_insn (AS1 (fst
%z3
,%y0
), xops
);
900 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
902 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
905 if (size
> UNITS_PER_WORD
)
907 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
909 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
912 xops
[0] = adj_offsettable_operand (xops
[0], 4);
914 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
917 if (size
> 2 * UNITS_PER_WORD
)
919 dest
= gen_rtx_REG (SImode
, REGNO (dest
) + 1);
921 output_asm_insn (AS1 (pop
%L0
,%0), &dest
);
924 xops
[0] = adj_offsettable_operand (xops
[0], 4);
925 output_asm_insn (AS2 (mov
%L0
,%0,%3), xops
);
932 singlemove_string (operands
)
936 if (GET_CODE (operands
[0]) == MEM
937 && GET_CODE (x
= XEXP (operands
[0], 0)) == PRE_DEC
)
939 if (XEXP (x
, 0) != stack_pointer_rtx
)
943 else if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
944 return output_move_const_single (operands
);
945 else if (GET_CODE (operands
[0]) == REG
|| GET_CODE (operands
[1]) == REG
)
946 return AS2 (mov
%L0
,%1,%0);
947 else if (CONSTANT_P (operands
[1]))
948 return AS2 (mov
%L0
,%1,%0);
951 output_asm_insn ("push%L1 %1", operands
);
956 /* Return a REG that occurs in ADDR with coefficient 1.
957 ADDR can be effectively incremented by incrementing REG. */
963 while (GET_CODE (addr
) == PLUS
)
965 if (GET_CODE (XEXP (addr
, 0)) == REG
)
966 addr
= XEXP (addr
, 0);
967 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
968 addr
= XEXP (addr
, 1);
969 else if (CONSTANT_P (XEXP (addr
, 0)))
970 addr
= XEXP (addr
, 1);
971 else if (CONSTANT_P (XEXP (addr
, 1)))
972 addr
= XEXP (addr
, 0);
977 if (GET_CODE (addr
) == REG
)
982 /* Output an insn to add the constant N to the register X. */
993 output_asm_insn (AS1 (dec
%L0
,%0), xops
);
995 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
996 else if (n
< 0 || n
== 128)
998 xops
[1] = GEN_INT (-n
);
999 output_asm_insn (AS2 (sub
%L0
,%1,%0), xops
);
1003 xops
[1] = GEN_INT (n
);
1004 output_asm_insn (AS2 (add
%L0
,%1,%0), xops
);
1008 /* Output assembler code to perform a doubleword move insn
1009 with operands OPERANDS. */
1012 output_move_double (operands
)
1015 enum {REGOP
, OFFSOP
, MEMOP
, PUSHOP
, POPOP
, CNSTOP
, RNDOP
} optype0
, optype1
;
1019 rtx addreg0
= 0, addreg1
= 0;
1020 int dest_overlapped_low
= 0;
1021 int size
= GET_MODE_SIZE (GET_MODE (operands
[0]));
1026 /* First classify both operands. */
1028 if (REG_P (operands
[0]))
1030 else if (offsettable_memref_p (operands
[0]))
1032 else if (GET_CODE (XEXP (operands
[0], 0)) == POST_INC
)
1034 else if (GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1036 else if (GET_CODE (operands
[0]) == MEM
)
1041 if (REG_P (operands
[1]))
1043 else if (CONSTANT_P (operands
[1]))
1045 else if (offsettable_memref_p (operands
[1]))
1047 else if (GET_CODE (XEXP (operands
[1], 0)) == POST_INC
)
1049 else if (GET_CODE (XEXP (operands
[1], 0)) == PRE_DEC
)
1051 else if (GET_CODE (operands
[1]) == MEM
)
1056 /* Check for the cases that the operand constraints are not
1057 supposed to allow to happen. Abort if we get one,
1058 because generating code for these cases is painful. */
1060 if (optype0
== RNDOP
|| optype1
== RNDOP
)
1063 /* If one operand is decrementing and one is incrementing
1064 decrement the former register explicitly
1065 and change that operand into ordinary indexing. */
1067 if (optype0
== PUSHOP
&& optype1
== POPOP
)
1069 /* ??? Can this ever happen on i386? */
1070 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1071 asm_add (-size
, operands
[0]);
1072 if (GET_MODE (operands
[1]) == XFmode
)
1073 operands
[0] = gen_rtx_MEM (XFmode
, operands
[0]);
1074 else if (GET_MODE (operands
[0]) == DFmode
)
1075 operands
[0] = gen_rtx_MEM (DFmode
, operands
[0]);
1077 operands
[0] = gen_rtx_MEM (DImode
, operands
[0]);
1081 if (optype0
== POPOP
&& optype1
== PUSHOP
)
1083 /* ??? Can this ever happen on i386? */
1084 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1085 asm_add (-size
, operands
[1]);
1086 if (GET_MODE (operands
[1]) == XFmode
)
1087 operands
[1] = gen_rtx_MEM (XFmode
, operands
[1]);
1088 else if (GET_MODE (operands
[1]) == DFmode
)
1089 operands
[1] = gen_rtx_MEM (DFmode
, operands
[1]);
1091 operands
[1] = gen_rtx_MEM (DImode
, operands
[1]);
1095 /* If an operand is an unoffsettable memory ref, find a register
1096 we can increment temporarily to make it refer to the second word. */
1098 if (optype0
== MEMOP
)
1099 addreg0
= find_addr_reg (XEXP (operands
[0], 0));
1101 if (optype1
== MEMOP
)
1102 addreg1
= find_addr_reg (XEXP (operands
[1], 0));
1104 /* Ok, we can do one word at a time.
1105 Normally we do the low-numbered word first,
1106 but if either operand is autodecrementing then we
1107 do the high-numbered word first.
1109 In either case, set up in LATEHALF the operands to use
1110 for the high-numbered word and in some cases alter the
1111 operands in OPERANDS to be suitable for the low-numbered word. */
1115 if (optype0
== REGOP
)
1117 middlehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1118 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 2);
1120 else if (optype0
== OFFSOP
)
1122 middlehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1123 latehalf
[0] = adj_offsettable_operand (operands
[0], 8);
1127 middlehalf
[0] = operands
[0];
1128 latehalf
[0] = operands
[0];
1131 if (optype1
== REGOP
)
1133 middlehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1134 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 2);
1136 else if (optype1
== OFFSOP
)
1138 middlehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1139 latehalf
[1] = adj_offsettable_operand (operands
[1], 8);
1141 else if (optype1
== CNSTOP
)
1143 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1145 REAL_VALUE_TYPE r
; long l
[3];
1147 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1148 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r
, l
);
1149 operands
[1] = GEN_INT (l
[0]);
1150 middlehalf
[1] = GEN_INT (l
[1]);
1151 latehalf
[1] = GEN_INT (l
[2]);
1153 else if (CONSTANT_P (operands
[1]))
1154 /* No non-CONST_DOUBLE constant should ever appear here. */
1159 middlehalf
[1] = operands
[1];
1160 latehalf
[1] = operands
[1];
1166 /* Size is not 12. */
1168 if (optype0
== REGOP
)
1169 latehalf
[0] = gen_rtx_REG (SImode
, REGNO (operands
[0]) + 1);
1170 else if (optype0
== OFFSOP
)
1171 latehalf
[0] = adj_offsettable_operand (operands
[0], 4);
1173 latehalf
[0] = operands
[0];
1175 if (optype1
== REGOP
)
1176 latehalf
[1] = gen_rtx_REG (SImode
, REGNO (operands
[1]) + 1);
1177 else if (optype1
== OFFSOP
)
1178 latehalf
[1] = adj_offsettable_operand (operands
[1], 4);
1179 else if (optype1
== CNSTOP
)
1180 split_double (operands
[1], &operands
[1], &latehalf
[1]);
1182 latehalf
[1] = operands
[1];
1185 /* If insn is effectively movd N (sp),-(sp) then we will do the
1186 high word first. We should use the adjusted operand 1
1187 (which is N+4 (sp) or N+8 (sp))
1188 for the low word and middle word as well,
1189 to compensate for the first decrement of sp. */
1190 if (optype0
== PUSHOP
1191 && REGNO (XEXP (XEXP (operands
[0], 0), 0)) == STACK_POINTER_REGNUM
1192 && reg_overlap_mentioned_p (stack_pointer_rtx
, operands
[1]))
1193 middlehalf
[1] = operands
[1] = latehalf
[1];
1195 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1196 if the upper part of reg N does not appear in the MEM, arrange to
1197 emit the move late-half first. Otherwise, compute the MEM address
1198 into the upper part of N and use that as a pointer to the memory
1200 if (optype0
== REGOP
1201 && (optype1
== OFFSOP
|| optype1
== MEMOP
))
1203 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1204 && reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1206 /* If both halves of dest are used in the src memory address,
1207 compute the address into latehalf of dest. */
1209 xops
[0] = latehalf
[0];
1210 xops
[1] = XEXP (operands
[1], 0);
1211 output_asm_insn (AS2 (lea
%L0
,%a1
,%0), xops
);
1212 if (GET_MODE (operands
[1]) == XFmode
)
1214 operands
[1] = gen_rtx_MEM (XFmode
, latehalf
[0]);
1215 middlehalf
[1] = adj_offsettable_operand (operands
[1], size
-8);
1216 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1220 operands
[1] = gen_rtx_MEM (DImode
, latehalf
[0]);
1221 latehalf
[1] = adj_offsettable_operand (operands
[1], size
-4);
1226 && reg_mentioned_p (middlehalf
[0], XEXP (operands
[1], 0)))
1228 /* Check for two regs used by both source and dest. */
1229 if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0))
1230 || reg_mentioned_p (latehalf
[0], XEXP (operands
[1], 0)))
1233 /* JRV says this can't happen: */
1234 if (addreg0
|| addreg1
)
1237 /* Only the middle reg conflicts; simply put it last. */
1238 output_asm_insn (singlemove_string (operands
), operands
);
1239 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1240 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1244 else if (reg_mentioned_p (operands
[0], XEXP (operands
[1], 0)))
1245 /* If the low half of dest is mentioned in the source memory
1246 address, the arrange to emit the move late half first. */
1247 dest_overlapped_low
= 1;
1250 /* If one or both operands autodecrementing,
1251 do the two words, high-numbered first. */
1253 /* Likewise, the first move would clobber the source of the second one,
1254 do them in the other order. This happens only for registers;
1255 such overlap can't happen in memory unless the user explicitly
1256 sets it up, and that is an undefined circumstance. */
1259 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1260 || (optype0
== REGOP
&& optype1
== REGOP
1261 && REGNO (operands
[0]) == REGNO (latehalf
[1]))
1262 || dest_overlapped_low
)
1265 if (optype0
== PUSHOP
|| optype1
== PUSHOP
1266 || (optype0
== REGOP
&& optype1
== REGOP
1267 && ((middlehalf
[1] && REGNO (operands
[0]) == REGNO (middlehalf
[1]))
1268 || REGNO (operands
[0]) == REGNO (latehalf
[1])))
1269 || dest_overlapped_low
)
1271 /* Make any unoffsettable addresses point at high-numbered word. */
1273 asm_add (size
-4, addreg0
);
1275 asm_add (size
-4, addreg1
);
1278 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1280 /* Undo the adds we just did. */
1282 asm_add (-4, addreg0
);
1284 asm_add (-4, addreg1
);
1288 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1290 asm_add (-4, addreg0
);
1292 asm_add (-4, addreg1
);
1295 /* Do low-numbered word. */
1296 return singlemove_string (operands
);
1299 /* Normal case: do the two words, low-numbered first. */
1301 output_asm_insn (singlemove_string (operands
), operands
);
1303 /* Do the middle one of the three words for long double */
1307 asm_add (4, addreg0
);
1309 asm_add (4, addreg1
);
1311 output_asm_insn (singlemove_string (middlehalf
), middlehalf
);
1314 /* Make any unoffsettable addresses point at high-numbered word. */
1316 asm_add (4, addreg0
);
1318 asm_add (4, addreg1
);
1321 output_asm_insn (singlemove_string (latehalf
), latehalf
);
1323 /* Undo the adds we just did. */
1325 asm_add (4-size
, addreg0
);
1327 asm_add (4-size
, addreg1
);
1332 #define MAX_TMPS 2 /* max temporary registers used */
/* NOTE(review): this chunk is a line-mangled extraction; statements are split
   across lines and some original lines (braces, the trailing `return ""`) are
   missing.  Code below is left byte-identical; only comments are added.

   output_move_pushmem: emit asm to push a LENGTH-byte memory block SRC
   (operands[1]) onto the stack, 4 bytes at a time.  If SRC's address does not
   involve the stack pointer (stack_p == 0), it pushes directly from memory;
   otherwise it stages words through up to MAX_TMPS scratch registers taken
   from operands[tmp_start..n_operands-1], tracking stack_offset because each
   push moves %esp.  Aborts via fatal_insn on non-offsettable SRC or a length
   that is not a multiple of 4.  */
1334 /* Output the appropriate code to move push memory on the stack */
1337 output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
)
1349 } tmp_info
[MAX_TMPS
];
1351 rtx src
= operands
[1];
1354 int stack_p
= reg_overlap_mentioned_p (stack_pointer_rtx
, src
);
1355 int stack_offset
= 0;
1359 if (! offsettable_memref_p (src
))
1360 fatal_insn ("Source is not offsettable", insn
);
1362 if ((length
& 3) != 0)
1363 fatal_insn ("Pushing non-word aligned size", insn
);
1365 /* Figure out which temporary registers we have available */
1366 for (i
= tmp_start
; i
< n_operands
; i
++)
1368 if (GET_CODE (operands
[i
]) == REG
)
1370 if (reg_overlap_mentioned_p (operands
[i
], src
))
1373 tmp_info
[ max_tmps
++ ].xops
[1] = operands
[i
];
1374 if (max_tmps
== MAX_TMPS
)
1380 for (offset
= length
- 4; offset
>= 0; offset
-= 4)
1382 xops
[0] = adj_offsettable_operand (src
, offset
+ stack_offset
);
1383 output_asm_insn (AS1(push
%L0
,%0), xops
);
1389 for (offset
= length
- 4; offset
>= 0; )
1391 for (num_tmps
= 0; num_tmps
< max_tmps
&& offset
>= 0; num_tmps
++)
1393 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%0,%1);
1394 tmp_info
[num_tmps
].push
= AS1(push
%L0
,%1);
1395 tmp_info
[num_tmps
].xops
[0]
1396 = adj_offsettable_operand (src
, offset
+ stack_offset
);
1400 for (i
= 0; i
< num_tmps
; i
++)
1401 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1403 for (i
= 0; i
< num_tmps
; i
++)
1404 output_asm_insn (tmp_info
[i
].push
, tmp_info
[i
].xops
);
1407 stack_offset
+= 4*num_tmps
;
/* NOTE(review): line-mangled extraction; some original lines (braces, some
   declarations) are missing.  Code left byte-identical; comments only.

   output_move_memory: emit asm for a LENGTH-byte memory-to-memory move from
   SRC (operands[1]) to DEST (operands[0]).  A push onto the stack
   (dest == (mem (pre_inc sp))) is delegated to output_move_pushmem.  The move
   is staged through scratch registers from operands[tmp_start..n_operands-1]
   (load all, then store all, per group), 4 bytes at a time, then a 2-byte
   tail, then a 1-byte tail through a byte-addressable register qi_tmp.
   fatal_insn on non-offsettable operands, scratch/operand overlap, no
   scratch registers, or no QI register for an odd length.  */
1413 /* Output the appropriate code to move data between two memory locations */
1416 output_move_memory (operands
, insn
, length
, tmp_start
, n_operands
)
1428 } tmp_info
[MAX_TMPS
];
1430 rtx dest
= operands
[0];
1431 rtx src
= operands
[1];
1432 rtx qi_tmp
= NULL_RTX
;
1438 if (GET_CODE (dest
) == MEM
1439 && GET_CODE (XEXP (dest
, 0)) == PRE_INC
1440 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
)
1441 return output_move_pushmem (operands
, insn
, length
, tmp_start
, n_operands
);
1443 if (! offsettable_memref_p (src
))
1444 fatal_insn ("Source is not offsettable", insn
);
1446 if (! offsettable_memref_p (dest
))
1447 fatal_insn ("Destination is not offsettable", insn
);
1449 /* Figure out which temporary registers we have available */
1450 for (i
= tmp_start
; i
< n_operands
; i
++)
1452 if (GET_CODE (operands
[i
]) == REG
)
1454 if ((length
& 1) != 0 && qi_tmp
== 0 && QI_REG_P (operands
[i
]))
1455 qi_tmp
= operands
[i
];
1457 if (reg_overlap_mentioned_p (operands
[i
], dest
))
1458 fatal_insn ("Temporary register overlaps the destination", insn
);
1460 if (reg_overlap_mentioned_p (operands
[i
], src
))
1461 fatal_insn ("Temporary register overlaps the source", insn
);
1463 tmp_info
[max_tmps
++].xops
[2] = operands
[i
];
1464 if (max_tmps
== MAX_TMPS
)
1470 fatal_insn ("No scratch registers were found to do memory->memory moves",
1473 if ((length
& 1) != 0)
1476 fatal_insn ("No byte register found when moving odd # of bytes.",
1482 for (num_tmps
= 0; num_tmps
< max_tmps
; num_tmps
++)
1486 tmp_info
[num_tmps
].load
= AS2(mov
%L0
,%1,%2);
1487 tmp_info
[num_tmps
].store
= AS2(mov
%L0
,%2,%0);
1488 tmp_info
[num_tmps
].xops
[0]
1489 = adj_offsettable_operand (dest
, offset
);
1490 tmp_info
[num_tmps
].xops
[1]
1491 = adj_offsettable_operand (src
, offset
);
1497 else if (length
>= 2)
1499 tmp_info
[num_tmps
].load
= AS2(mov
%W0
,%1,%2);
1500 tmp_info
[num_tmps
].store
= AS2(mov
%W0
,%2,%0);
1501 tmp_info
[num_tmps
].xops
[0]
1502 = adj_offsettable_operand (dest
, offset
);
1503 tmp_info
[num_tmps
].xops
[1]
1504 = adj_offsettable_operand (src
, offset
);
1513 for (i
= 0; i
< num_tmps
; i
++)
1514 output_asm_insn (tmp_info
[i
].load
, tmp_info
[i
].xops
);
1516 for (i
= 0; i
< num_tmps
; i
++)
1517 output_asm_insn (tmp_info
[i
].store
, tmp_info
[i
].xops
);
1522 xops
[0] = adj_offsettable_operand (dest
, offset
);
1523 xops
[1] = adj_offsettable_operand (src
, offset
);
1525 output_asm_insn (AS2(mov
%B0
,%1,%2), xops
);
1526 output_asm_insn (AS2(mov
%B0
,%2,%0), xops
);
/* NOTE(review): line-mangled extraction; the return statements of this
   function are missing from view.  Code left byte-identical; comments only.

   standard_80387_constant_p: classify CONST_DOUBLE X as one of the 80387's
   built-in constants.  Visibly it tests X against 0.0 (excluding -0.0, which
   fld1z would not reproduce) and 1.0 under a float-trap handler installed
   with set_float_handler/setjmp; the encoded result is returned by lines not
   visible here.  Only compiled when host REAL_VALUE arithmetic is usable.  */
1533 standard_80387_constant_p (x
)
1536 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1541 if (setjmp (handler
))
1544 set_float_handler (handler
);
1545 REAL_VALUE_FROM_CONST_DOUBLE (d
, x
);
1546 is0
= REAL_VALUES_EQUAL (d
, dconst0
) && !REAL_VALUE_MINUS_ZERO (d
);
1547 is1
= REAL_VALUES_EQUAL (d
, dconst1
);
1548 set_float_handler (NULL_PTR
);
1556 /* Note that on the 80387, other constants, such as pi,
1557 are much slower to load as standard constants
1558 than to load from doubles in memory! */
/* NOTE(review): line-mangled extraction; the branch that consumes `conval`
   (emitting fldz/fld1 for standard constants) is missing from view.  Code
   left byte-identical; comments only.

   output_move_const_single: return the asm template that loads the constant
   operands[1] into operands[0].  For an FP destination it first checks for a
   standard 80387 constant; otherwise a CONST_DOUBLE (presumably SFmode here
   -- XFmode is handled by an earlier, missing branch; confirm against full
   source) is converted to its 32-bit target image and moved as an integer
   via singlemove_string.  */
1565 output_move_const_single (operands
)
1568 if (FP_REG_P (operands
[0]))
1570 int conval
= standard_80387_constant_p (operands
[1]);
1579 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1581 REAL_VALUE_TYPE r
; long l
;
1583 if (GET_MODE (operands
[1]) == XFmode
)
1586 REAL_VALUE_FROM_CONST_DOUBLE (r
, operands
[1]);
1587 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
1588 operands
[1] = GEN_INT (l
);
1591 return singlemove_string (operands
);
/* NOTE(review): line-mangled extraction; the SYMBOL_REF/LABEL_REF/CONST
   switch cases are missing from view.  Code left byte-identical.

   symbolic_operand: predicate (old-style K&R definition, MODE unused here)
   recognizing a symbol/label reference, or such a reference plus a
   CONST_INT, as the visible (plus ...) case shows.  */
1594 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1595 reference and a constant. */
1598 symbolic_operand (op
, mode
)
1600 enum machine_mode mode
;
1602 switch (GET_CODE (op
))
1610 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1611 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1612 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
/* NOTE(review): line-mangled extraction; the return statements are missing
   from view.  Code left byte-identical.

   call_insn_operand: predicate accepting a MEM whose address is either a
   PIC-safe constant address (general_operand check matters for PIC) or a
   register that is not the arg pointer and not a virtual register
   (virtual regs may later become reg+const, which the call patterns
   cannot handle).  */
1619 /* Test for a valid operand for a call instruction.
1620 Don't allow the arg pointer register or virtual regs
1621 since they may change into reg + const, which the patterns
1622 can't handle yet. */
1625 call_insn_operand (op
, mode
)
1627 enum machine_mode mode
;
1629 if (GET_CODE (op
) == MEM
1630 && ((CONSTANT_ADDRESS_P (XEXP (op
, 0))
1631 /* This makes a difference for PIC. */
1632 && general_operand (XEXP (op
, 0), Pmode
))
1633 || (GET_CODE (XEXP (op
, 0)) == REG
1634 && XEXP (op
, 0) != arg_pointer_rtx
1635 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1636 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
/* NOTE(review): line-mangled extraction; return statements missing from
   view.  Code left byte-identical.

   expander_call_insn_operand: like call_insn_operand but accepts any
   constant address (no general_operand/PIC filtering) -- used at expand
   time before PIC legitimization.  */
1642 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1646 expander_call_insn_operand (op
, mode
)
1648 enum machine_mode mode
;
1650 if (GET_CODE (op
) == MEM
1651 && (CONSTANT_ADDRESS_P (XEXP (op
, 0))
1652 || (GET_CODE (XEXP (op
, 0)) == REG
1653 && XEXP (op
, 0) != arg_pointer_rtx
1654 && ! (REGNO (XEXP (op
, 0)) >= FIRST_PSEUDO_REGISTER
1655 && REGNO (XEXP (op
, 0)) <= LAST_VIRTUAL_REGISTER
))))
/* NOTE(review): line-mangled extraction; early `return 0;` lines are
   missing from view.  Code left byte-identical.

   arithmetic_comparison_operator: predicate for comparison operators whose
   result can be read from the flags an arithmetic insn already set.  It
   rejects a mode mismatch, non-comparison codes, and GT/LE (which on x86
   need flags a plain arithmetic op does not produce in usable form).  */
1661 /* Return 1 if OP is a comparison operator that can use the condition code
1662 generated by an arithmetic operation. */
1665 arithmetic_comparison_operator (op
, mode
)
1667 enum machine_mode mode
;
1671 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
1674 code
= GET_CODE (op
);
1675 if (GET_RTX_CLASS (code
) != '<')
1678 return (code
!= GT
&& code
!= LE
);
/* NOTE(review): line-mangled extraction; the `return 1;`/`return 0;` lines
   are missing from view.  Code left byte-identical.

   symbolic_reference_mentioned_p: recursively scan OP's rtx format string;
   true when OP is itself a SYMBOL_REF/LABEL_REF or any 'e' (expression) or
   'E' (vector) sub-rtx mentions one.  */
1681 /* Returns 1 if OP contains a symbol reference */
1684 symbolic_reference_mentioned_p (op
)
1690 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1693 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1694 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1700 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1701 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1705 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
/* NOTE(review): line-mangled extraction; emit_insn calls, braces and the
   final returns are missing from view.  Code left byte-identical.

   ix86_expand_binary_operator: massage OPERANDS for a two-input operator so
   the resulting insn matches x86 constraints (at most one memory input, and
   CONST_INT first operand only for commutative ops).  Commutative operands
   are swapped to canonical order; when optimizing (and not reloading),
   memory inputs are forced into registers to help CSE; a CONST_INT first
   operand of MINUS is copied into a fresh register.  The second half
   repeats the forcing when NOT optimizing, as a last attempt to make
   ix86_binary_operator_ok succeed before giving up.  */
1712 /* Attempt to expand a binary operator.  Make the expansion closer to the
1713 actual machine, then just general_operand, which will allow 3 separate
1714 memory references (one output, two input) in a single insn.  Return
1715 whether the insn fails, or succeeds. */
1718 ix86_expand_binary_operator (code
, mode
, operands
)
1720 enum machine_mode mode
;
1727 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1728 if (GET_RTX_CLASS (code
) == 'c'
1729 && (rtx_equal_p (operands
[0], operands
[2])
1730 || immediate_operand (operands
[1], mode
)))
1732 rtx temp
= operands
[1];
1733 operands
[1] = operands
[2];
1737 /* If optimizing, copy to regs to improve CSE */
1738 if (TARGET_PSEUDO
&& optimize
1739 && ((reload_in_progress
| reload_completed
) == 0))
1741 if (GET_CODE (operands
[1]) == MEM
1742 && ! rtx_equal_p (operands
[0], operands
[1]))
1743 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1745 if (GET_CODE (operands
[2]) == MEM
)
1746 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1748 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1750 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1752 emit_move_insn (temp
, operands
[1]);
1758 if (!ix86_binary_operator_ok (code
, mode
, operands
))
1760 /* If not optimizing, try to make a valid insn (optimize code
1761 previously did this above to improve chances of CSE) */
1763 if ((! TARGET_PSEUDO
|| !optimize
)
1764 && ((reload_in_progress
| reload_completed
) == 0)
1765 && (GET_CODE (operands
[1]) == MEM
|| GET_CODE (operands
[2]) == MEM
))
1768 if (GET_CODE (operands
[1]) == MEM
1769 && ! rtx_equal_p (operands
[0], operands
[1]))
1771 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1775 if (GET_CODE (operands
[2]) == MEM
)
1777 operands
[2] = force_reg (GET_MODE (operands
[2]), operands
[2]);
1781 if (GET_CODE (operands
[1]) == CONST_INT
&& code
== MINUS
)
1783 rtx temp
= gen_reg_rtx (GET_MODE (operands
[0]));
1785 emit_move_insn (temp
, operands
[1]);
1790 if (modified
&& ! ix86_binary_operator_ok (code
, mode
, operands
))
/* NOTE(review): line-mangled extraction; declaration lines for `code` and
   `operands` are missing from view.  Code left byte-identical.

   ix86_binary_operator_ok: x86 allows at most one memory operand per
   two-address insn, and an immediate in the first source slot only when
   the operator is commutative (so it could be swapped).  */
1800 /* Return TRUE or FALSE depending on whether the binary operator meets the
1801 appropriate constraints. */
1804 ix86_binary_operator_ok (code
, mode
, operands
)
1806 enum machine_mode mode
;
1809 return (GET_CODE (operands
[1]) != MEM
|| GET_CODE (operands
[2]) != MEM
)
1810 && (GET_CODE (operands
[1]) != CONST_INT
|| GET_RTX_CLASS (code
) == 'c');
/* NOTE(review): line-mangled extraction; the condition head of the first
   `if` (line 1828) and the emit/return tail are missing from view.  Code
   left byte-identical.

   ix86_expand_unary_operator: analogue of ix86_expand_binary_operator for
   one-input operators -- force a MEM source into a register (when
   optimizing and not reloading, and again as a fallback when not
   optimizing) so that ix86_unary_operator_ok accepts the operands.  */
1813 /* Attempt to expand a unary operator.  Make the expansion closer to the
1814 actual machine, then just general_operand, which will allow 2 separate
1815 memory references (one output, one input) in a single insn.  Return
1816 whether the insn fails, or succeeds. */
1819 ix86_expand_unary_operator (code
, mode
, operands
)
1821 enum machine_mode mode
;
1826 /* If optimizing, copy to regs to improve CSE */
1829 && ((reload_in_progress
| reload_completed
) == 0)
1830 && GET_CODE (operands
[1]) == MEM
)
1831 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1833 if (! ix86_unary_operator_ok (code
, mode
, operands
))
1835 if ((! TARGET_PSEUDO
|| optimize
== 0)
1836 && ((reload_in_progress
| reload_completed
) == 0)
1837 && GET_CODE (operands
[1]) == MEM
)
1839 operands
[1] = force_reg (GET_MODE (operands
[1]), operands
[1]);
1840 if (! ix86_unary_operator_ok (code
, mode
, operands
))
/* NOTE(review): line-mangled extraction; the function body (its return
   expression) is missing from view.  Code left byte-identical.

   ix86_unary_operator_ok: constraint check for unary operators, companion
   to ix86_binary_operator_ok above.  */
1850 /* Return TRUE or FALSE depending on whether the unary operator meets the
1851 appropriate constraints. */
1854 ix86_unary_operator_ok (code
, mode
, operands
)
1856 enum machine_mode mode
;
/* File-scope state for the -fpic "load return address into %ebx" thunk:
   the label rtx, its printable name buffer, and a counter used to make
   each emitted LPR<n> label unique.  Shared by asm_output_function_prefix
   and load_pic_register below.  */
1862 static rtx pic_label_rtx
;
1863 static char pic_label_name
[256];
1864 static int pic_label_no
= 0;
/* NOTE(review): line-mangled extraction; braces and some lines are missing
   from view.  Code left byte-identical.

   asm_output_function_prefix: when PIC is in use and the target favors
   deep branch prediction, emit -- before the function proper -- a tiny
   thunk labelled LPR<n> that does `movl (%esp),%ebx; ret`, i.e. copies the
   caller's return address into the PIC register and returns.  A dummy
   FUNCTION_DECL node is built so ASM_DECLARE_FUNCTION_NAME can emit the
   thunk's label with proper function linkage decorations.  */
1866 /* This function generates code for -fpic that loads %ebx with
1867 with the return address of the caller and then returns.  */
1870 asm_output_function_prefix (file
, name
)
1875 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
1876 || current_function_uses_const_pool
);
1877 xops
[0] = pic_offset_table_rtx
;
1878 xops
[1] = stack_pointer_rtx
;
1880 /* Deep branch prediction favors having a return for every call. */
1881 if (pic_reg_used
&& TARGET_DEEP_BRANCH_PREDICTION
)
1885 if (pic_label_rtx
== 0)
1887 pic_label_rtx
= gen_label_rtx ();
1888 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
1889 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1892 prologue_node
= make_node (FUNCTION_DECL
);
1893 DECL_RESULT (prologue_node
) = 0;
1894 #ifdef ASM_DECLARE_FUNCTION_NAME
1895 ASM_DECLARE_FUNCTION_NAME (file
, pic_label_name
, prologue_node
);
1897 output_asm_insn ("movl (%1),%0", xops
);
1898 output_asm_insn ("ret", xops
);
/* NOTE(review): line-mangled extraction; the body after the
   TARGET_SCHEDULE_PROLOGUE test (presumably the call into ix86_prologue --
   confirm against full source) is missing from view.  Code left
   byte-identical.  */
1902 /* Generate the assembly code for function entry.
1903 FILE is an stdio stream to output the code to.
1904 SIZE is an int: how many units of temporary storage to allocate. */
1907 function_prologue (file
, size
)
1911 if (TARGET_SCHEDULE_PROLOGUE
)
/* NOTE(review): line-mangled extraction; the body after the
   TARGET_SCHEDULE_PROLOGUE guard (presumably delegating to ix86_prologue
   with do_rtl set -- confirm against full source) is missing from view.
   Code left byte-identical.  */
1920 /* Expand the prologue into a bunch of separate insns. */
1923 ix86_expand_prologue ()
1925 if (! TARGET_SCHEDULE_PROLOGUE
)
/* NOTE(review): line-mangled extraction; braces and several argument lines
   are missing from view.  Code left byte-identical.

   load_pic_register: emit the code that initializes the PIC register
   (%ebx), either as RTL (do_rtl) or as direct asm text.  With deep branch
   prediction, call the shared LPR<n> thunk and then add
   $_GLOBAL_OFFSET_TABLE_; otherwise use the classic
   call/label/pop/add $_GLOBAL_OFFSET_TABLE_+[.-label] sequence (one fused
   pattern in the RTL case, since a fresh CODE_LABEL can't be emitted after
   reload).  Ends with a scheduling blockage so later insns that implicitly
   use the GOT are not moved above this setup.  */
1932 load_pic_register (do_rtl
)
1937 if (TARGET_DEEP_BRANCH_PREDICTION
)
1939 xops
[0] = pic_offset_table_rtx
;
1940 if (pic_label_rtx
== 0)
1942 pic_label_rtx
= gen_label_rtx ();
1943 sprintf (pic_label_name
, "LPR%d", pic_label_no
++);
1944 LABEL_NAME (pic_label_rtx
) = pic_label_name
;
1947 xops
[1] = gen_rtx_MEM (QImode
,
1948 gen_rtx_SYMBOL_REF (Pmode
,
1949 LABEL_NAME (pic_label_rtx
)));
1953 emit_insn (gen_prologue_get_pc (xops
[0], xops
[1]));
1954 emit_insn (gen_prologue_set_got
1956 gen_rtx_SYMBOL_REF (Pmode
, "$_GLOBAL_OFFSET_TABLE_"),
1961 output_asm_insn (AS1 (call
,%P1
), xops
);
1962 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops
);
1969 xops
[0] = pic_offset_table_rtx
;
1970 xops
[1] = gen_label_rtx ();
1974 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
1975 a new CODE_LABEL after reload, so we need a single pattern to
1976 emit the 3 necessary instructions. */
1977 emit_insn (gen_prologue_get_pc_and_set_got (xops
[0]));
1981 output_asm_insn (AS1 (call
,%P1
), xops
);
1982 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
1983 CODE_LABEL_NUMBER (xops
[1]));
1984 output_asm_insn (AS1 (pop
%L0
,%0), xops
);
1985 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops
);
1989 /* When -fpic, we must emit a scheduling barrier, so that the instruction
1990 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
1991 moved before any instruction which implicitly uses the got. */
1994 emit_insn (gen_blockage ());
/* NOTE(review): line-mangled extraction; braces, some declarations, #endif
   lines and a few statement heads are missing from view.  Code left
   byte-identical.

   ix86_prologue: emit the function prologue, as RTL (do_rtl) or asm text.
   Steps visible here: (1) if a frame pointer is needed, push %ebp and copy
   %esp into it, marking the insns RTX_FRAME_RELATED and emitting dwarf2
   CFA/save notes; (2) allocate tsize bytes of frame, either by a direct
   `sub` (when no stack probing is wanted or the size is under
   CHECK_STACK_LIMIT) or by loading the size into %eax and calling _alloca
   to probe the stack; (3) push every call-saved register that is live
   (plus the PIC register when used), again with dwarf2 notes; (4) set up
   the PIC register via load_pic_register; (5) emit a scheduling blockage
   when profiling so nothing drifts above the mcount call.  */
1998 ix86_prologue (do_rtl
)
2004 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2005 || current_function_uses_const_pool
);
2006 long tsize
= get_frame_size ();
2008 int cfa_offset
= INCOMING_FRAME_SP_OFFSET
, cfa_store_offset
= cfa_offset
;
2010 xops
[0] = stack_pointer_rtx
;
2011 xops
[1] = frame_pointer_rtx
;
2012 xops
[2] = GEN_INT (tsize
);
2014 if (frame_pointer_needed
)
2018 insn
= emit_insn (gen_rtx_SET
2020 gen_rtx_MEM (SImode
,
2021 gen_rtx_PRE_DEC (SImode
,
2022 stack_pointer_rtx
)),
2023 frame_pointer_rtx
));
2025 RTX_FRAME_RELATED_P (insn
) = 1;
2026 insn
= emit_move_insn (xops
[1], xops
[0]);
2027 RTX_FRAME_RELATED_P (insn
) = 1;
2032 output_asm_insn ("push%L1 %1", xops
);
2033 #ifdef INCOMING_RETURN_ADDR_RTX
2034 if (dwarf2out_do_frame ())
2036 char *l
= dwarf2out_cfi_label ();
2038 cfa_store_offset
+= 4;
2039 cfa_offset
= cfa_store_offset
;
2040 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2041 dwarf2out_reg_save (l
, FRAME_POINTER_REGNUM
, - cfa_store_offset
);
2045 output_asm_insn (AS2 (mov
%L0
,%0,%1), xops
);
2046 #ifdef INCOMING_RETURN_ADDR_RTX
2047 if (dwarf2out_do_frame ())
2048 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM
, cfa_offset
);
2055 else if (! TARGET_STACK_PROBE
|| tsize
< CHECK_STACK_LIMIT
)
2059 insn
= emit_insn (gen_prologue_set_stack_ptr (xops
[2]));
2060 RTX_FRAME_RELATED_P (insn
) = 1;
2064 output_asm_insn (AS2 (sub
%L0
,%2,%0), xops
);
2065 #ifdef INCOMING_RETURN_ADDR_RTX
2066 if (dwarf2out_do_frame ())
2068 cfa_store_offset
+= tsize
;
2069 if (! frame_pointer_needed
)
2071 cfa_offset
= cfa_store_offset
;
2072 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM
, cfa_offset
);
2080 xops
[3] = gen_rtx_REG (SImode
, 0);
2082 emit_move_insn (xops
[3], xops
[2]);
2084 output_asm_insn (AS2 (mov
%L0
,%2,%3), xops
);
2086 xops
[3] = gen_rtx_MEM (FUNCTION_MODE
,
2087 gen_rtx_SYMBOL_REF (Pmode
, "_alloca"));
2090 emit_call_insn (gen_rtx_CALL (VOIDmode
, xops
[3], const0_rtx
));
2092 output_asm_insn (AS1 (call
,%P3
), xops
);
2095 /* Note If use enter it is NOT reversed args.
2096 This one is not reversed from intel!!
2097 I think enter is slower.  Also sdb doesn't like it.
2098 But if you want it the code is:
2100 xops[3] = const0_rtx;
2101 output_asm_insn ("enter %2,%3", xops);
2105 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2106 for (regno
= limit
- 1; regno
>= 0; regno
--)
2107 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2108 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2110 xops
[0] = gen_rtx_REG (SImode
, regno
);
2114 = emit_insn (gen_rtx_SET
2116 gen_rtx_MEM (SImode
,
2117 gen_rtx_PRE_DEC (SImode
,
2118 stack_pointer_rtx
)),
2121 RTX_FRAME_RELATED_P (insn
) = 1;
2125 output_asm_insn ("push%L0 %0", xops
);
2126 #ifdef INCOMING_RETURN_ADDR_RTX
2127 if (dwarf2out_do_frame ())
2129 char *l
= dwarf2out_cfi_label ();
2131 cfa_store_offset
+= 4;
2132 if (! frame_pointer_needed
)
2134 cfa_offset
= cfa_store_offset
;
2135 dwarf2out_def_cfa (l
, STACK_POINTER_REGNUM
, cfa_offset
);
2138 dwarf2out_reg_save (l
, regno
, - cfa_store_offset
);
2145 load_pic_register (do_rtl
);
2147 /* If we are profiling, make sure no instructions are scheduled before
2148 the call to mcount.  However, if -fpic, the above call will have
2150 if ((profile_flag
|| profile_block_flag
)
2151 && ! pic_reg_used
&& do_rtl
)
2152 emit_insn (gen_blockage ());
/* NOTE(review): line-mangled extraction; the nregs declaration/increment
   and braces are missing from view.  Code left byte-identical.

   ix86_can_use_return_insn_p: true when a bare `ret` is a sufficient
   epilogue.  Returns 0 before reload (register usage unknown), under
   NON_SAVING_SETJMP with setjmp calls, and (per the final test) whenever
   registers must be popped while a frame pointer is in use.  */
2155 /* Return 1 if it is appropriate to emit `ret' instructions in the
2156 body of a function.  Do this only if the epilogue is simple, needing a
2157 couple of insns.  Prior to reloading, we can't tell how many registers
2158 must be saved, so return 0 then.  Return 0 if there is no frame
2159 marker to de-allocate.
2161 If NON_SAVING_SETJMP is defined and true, then it is not possible
2162 for the epilogue to be simple, so return 0.  This is a special case
2163 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2164 until final, but jump_optimize may need to know sooner if a
2168 ix86_can_use_return_insn_p ()
2172 int reglimit
= (frame_pointer_needed
2173 ? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2174 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2175 || current_function_uses_const_pool
);
2177 #ifdef NON_SAVING_SETJMP
2178 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
2182 if (! reload_completed
)
2185 for (regno
= reglimit
- 1; regno
>= 0; regno
--)
2186 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2187 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2190 return nregs
== 0 || ! frame_pointer_needed
;
/* NOTE(review): line-mangled extraction; the function body (presumably a
   guard and a call into ix86_epilogue -- confirm against full source) is
   missing from view.  Code left byte-identical.  */
2193 /* This function generates the assembly code for function exit.
2194 FILE is an stdio stream to output the code to.
2195 SIZE is an int: how many units of temporary storage to deallocate. */
2198 function_epilogue (file
, size
)
/* NOTE(review): line-mangled extraction; the body (presumably delegating
   to ix86_epilogue with do_rtl set -- confirm against full source) is
   missing from view.  Code left byte-identical.  */
2205 /* Restore function stack, frame, and registers. */
2208 ix86_expand_epilogue ()
/* NOTE(review): line-mangled extraction; braces, nregs counting lines and
   several argument tails are missing from view.  Code left byte-identical.

   ix86_epilogue: emit the function epilogue, as RTL (do_rtl) or asm text.
   Visible steps: count the call-saved registers to restore (plus the PIC
   register when used); emit a scheduling blockage under -fpic/profiling so
   the %ebx restore cannot float above GOT-using insns; restore registers
   either by lea-then-pop (when several regs or no frame pointer) or by
   frame-pointer-relative moves; tear down the frame with `leave` (when
   TARGET_USE_LEAVE) or mov/pop of %ebp, or a plain `add` to %esp when
   there is no frame pointer; then return -- `ret $n` for stdcall-style
   argument popping, with an explicit pop/add/indirect-jmp sequence when
   the pop amount is >= 32768 (beyond what `ret $n` can encode).  */
2214 ix86_epilogue (do_rtl
)
2218 register int nregs
, limit
;
2221 int pic_reg_used
= flag_pic
&& (current_function_uses_pic_offset_table
2222 || current_function_uses_const_pool
);
2223 long tsize
= get_frame_size ();
2225 /* Compute the number of registers to pop */
2227 limit
= (frame_pointer_needed
? FRAME_POINTER_REGNUM
: STACK_POINTER_REGNUM
);
2231 for (regno
= limit
- 1; regno
>= 0; regno
--)
2232 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2233 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2236 /* sp is often unreliable so we must go off the frame pointer.
2238 In reality, we may not care if sp is unreliable, because we can restore
2239 the register relative to the frame pointer.  In theory, since each move
2240 is the same speed as a pop, and we don't need the leal, this is faster.
2241 For now restore multiple registers the old way. */
2243 offset
= - tsize
- (nregs
* UNITS_PER_WORD
);
2245 xops
[2] = stack_pointer_rtx
;
2247 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2248 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2249 moved before any instruction which implicitly uses the got.  This
2250 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2252 Alternatively, this could be fixed by making the dependence on the
2253 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2255 if (flag_pic
|| profile_flag
|| profile_block_flag
)
2256 emit_insn (gen_blockage ());
2258 if (nregs
> 1 || ! frame_pointer_needed
)
2260 if (frame_pointer_needed
)
2262 xops
[0] = adj_offsettable_operand (AT_BP (QImode
), offset
);
2264 emit_insn (gen_movsi_lea (xops
[2], XEXP (xops
[0], 0)));
2266 output_asm_insn (AS2 (lea
%L2
,%0,%2), xops
);
2269 for (regno
= 0; regno
< limit
; regno
++)
2270 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2271 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2273 xops
[0] = gen_rtx_REG (SImode
, regno
);
2276 emit_insn (gen_pop (xops
[0]));
2278 output_asm_insn ("pop%L0 %0", xops
);
2283 for (regno
= 0; regno
< limit
; regno
++)
2284 if ((regs_ever_live
[regno
] && ! call_used_regs
[regno
])
2285 || (regno
== PIC_OFFSET_TABLE_REGNUM
&& pic_reg_used
))
2287 xops
[0] = gen_rtx_REG (SImode
, regno
);
2288 xops
[1] = adj_offsettable_operand (AT_BP (Pmode
), offset
);
2291 emit_move_insn (xops
[0], xops
[1]);
2293 output_asm_insn (AS2 (mov
%L0
,%1,%0), xops
);
2298 if (frame_pointer_needed
)
2300 /* If not an i386, mov & pop is faster than "leave". */
2302 if (TARGET_USE_LEAVE
)
2305 emit_insn (gen_leave());
2307 output_asm_insn ("leave", xops
);
2311 xops
[0] = frame_pointer_rtx
;
2312 xops
[1] = stack_pointer_rtx
;
2316 emit_insn (gen_epilogue_set_stack_ptr());
2317 emit_insn (gen_pop (xops
[0]));
2321 output_asm_insn (AS2 (mov
%L2
,%0,%2), xops
);
2322 output_asm_insn ("pop%L0 %0", xops
);
2329 /* If there is no frame pointer, we must still release the frame. */
2330 xops
[0] = GEN_INT (tsize
);
2333 emit_insn (gen_rtx_SET (VOIDmode
, xops
[2],
2334 gen_rtx_PLUS (SImode
, xops
[2], xops
[0])));
2336 output_asm_insn (AS2 (add
%L2
,%0,%2), xops
);
2339 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2340 if (profile_block_flag
== 2)
2342 FUNCTION_BLOCK_PROFILER_EXIT(file
);
2346 if (current_function_pops_args
&& current_function_args_size
)
2348 xops
[1] = GEN_INT (current_function_pops_args
);
2350 /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
2351 asked to pop more, pop return address, do explicit add, and jump
2352 indirectly to the caller. */
2354 if (current_function_pops_args
>= 32768)
2356 /* ??? Which register to use here? */
2357 xops
[0] = gen_rtx_REG (SImode
, 2);
2361 emit_insn (gen_pop (xops
[0]));
2362 emit_insn (gen_rtx_SET (VOIDmode
, xops
[2],
2363 gen_rtx_PLUS (SImode
, xops
[1],
2365 emit_jump_insn (xops
[0]);
2369 output_asm_insn ("pop%L0 %0", xops
);
2370 output_asm_insn (AS2 (add
%L2
,%1,%2), xops
);
2371 output_asm_insn ("jmp %*%0", xops
);
2377 emit_jump_insn (gen_return_pop_internal (xops
[1]));
2379 output_asm_insn ("ret %1", xops
);
2385 emit_jump_insn (gen_return_internal ());
2387 output_asm_insn ("ret", xops
);
2391 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2392 that is a valid memory address for an instruction.
2393 The MODE argument is the machine mode for the MEM expression
2394 that wants to use this address.
2396 On x86, legitimate addresses are:
2397 base movl (base),reg
2398 displacement movl disp,reg
2399 base + displacement movl disp(base),reg
2400 index + base movl (base,index),reg
2401 (index + base) + displacement movl disp(base,index),reg
2402 index*scale movl (,index,scale),reg
2403 index*scale + disp movl disp(,index,scale),reg
2404 index*scale + base movl (base,index,scale),reg
2405 (index*scale + base) + disp movl disp(base,index,scale),reg
2407 In each case, scale can be 1, 2, 4, 8. */
2409 /* This is exactly the same as print_operand_addr, except that
2410 it recognizes addresses instead of printing them.
2412 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2413 convert common non-canonical forms to canonical form so that they will
/* NOTE(review): line-mangled extraction; the macro's continuation lines
   after the fprintf (presumably a debug_rtx dump and closing brace --
   confirm against full source) are missing from view.  Macro text left
   byte-identical (no comments inserted inside the `\` continuation).

   ADDR_INVALID(msg, insn): under -mdebug-addr, report why an address was
   rejected by legitimate_address_p below; otherwise a no-op.  */
2416 #define ADDR_INVALID(msg,insn)						\
2418 if (TARGET_DEBUG_ADDR)						\
2420 fprintf (stderr, msg);					\
/* NOTE(review): line-mangled extraction; braces, `return FALSE/TRUE` lines
   and some statement heads are missing from view.  Code left
   byte-identical.

   legitimate_address_p: recognizer behind GO_IF_LEGITIMATE_ADDRESS.
   Decomposes ADDR into base, indx, scale and disp following the canonical
   x86 shapes (reg; reg+reg; reg+disp; mult; plus-of-mult; plus-of-plus;
   bare displacement), then validates each part: base/index must be hard
   REGs valid for base/index use (strict vs. non-strict REG_OK_FOR_*
   checks; SUBREGs rejected to avoid spill failures), scale must be a
   CONST_INT of 1/2/4/8, and the displacement must be a constant address --
   with special-casing for constant-pool SYMBOL_REFs, rejection of
   CONST_DOUBLE, and PIC/half-pic rules requiring the PIC register to
   appear appropriately.  ADDR_INVALID reports the reason under
   -mdebug-addr.  */
2426 legitimate_address_p (mode
, addr
, strict
)
2427 enum machine_mode mode
;
2431 rtx base
= NULL_RTX
;
2432 rtx indx
= NULL_RTX
;
2433 rtx scale
= NULL_RTX
;
2434 rtx disp
= NULL_RTX
;
2436 if (TARGET_DEBUG_ADDR
)
2439 "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2440 GET_MODE_NAME (mode
), strict
);
2445 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == SUBREG
)
2448 else if (GET_CODE (addr
) == PLUS
)
2450 rtx op0
= XEXP (addr
, 0);
2451 rtx op1
= XEXP (addr
, 1);
2452 enum rtx_code code0
= GET_CODE (op0
);
2453 enum rtx_code code1
= GET_CODE (op1
);
2455 if (code0
== REG
|| code0
== SUBREG
)
2457 if (code1
== REG
|| code1
== SUBREG
)
2459 indx
= op0
;			/* index + base */
2465 base
= op0
;			/* base + displacement */
2470 else if (code0
== MULT
)
2472 indx
= XEXP (op0
, 0);
2473 scale
= XEXP (op0
, 1);
2475 if (code1
== REG
|| code1
== SUBREG
)
2476 base
= op1
;			/* index*scale + base */
2479 disp
= op1
;			/* index*scale + disp */
2482 else if (code0
== PLUS
&& GET_CODE (XEXP (op0
, 0)) == MULT
)
2484 indx
= XEXP (XEXP (op0
, 0), 0);	/* index*scale + base + disp */
2485 scale
= XEXP (XEXP (op0
, 0), 1);
2486 base
= XEXP (op0
, 1);
2490 else if (code0
== PLUS
)
2492 indx
= XEXP (op0
, 0);		/* index + base + disp */
2493 base
= XEXP (op0
, 1);
2499 ADDR_INVALID ("PLUS subcode is not valid.\n", op0
);
2504 else if (GET_CODE (addr
) == MULT
)
2506 indx
= XEXP (addr
, 0);		/* index*scale */
2507 scale
= XEXP (addr
, 1);
2511 disp
= addr
;			/* displacement */
2513 /* Allow arg pointer and stack pointer as index if there is not scaling */
2514 if (base
&& indx
&& !scale
2515 && (indx
== arg_pointer_rtx
|| indx
== stack_pointer_rtx
))
2522 /* Validate base register:
2524 Don't allow SUBREG's here, it can lead to spill failures when the base
2525 is one word out of a two word structure, which is represented internally
2530 if (GET_CODE (base
) != REG
)
2532 ADDR_INVALID ("Base is not a register.\n", base
);
2536 if ((strict
&& ! REG_OK_FOR_BASE_STRICT_P (base
))
2537 || (! strict
&& ! REG_OK_FOR_BASE_NONSTRICT_P (base
)))
2539 ADDR_INVALID ("Base is not valid.\n", base
);
2544 /* Validate index register:
2546 Don't allow SUBREG's here, it can lead to spill failures when the index
2547 is one word out of a two word structure, which is represented internally
2551 if (GET_CODE (indx
) != REG
)
2553 ADDR_INVALID ("Index is not a register.\n", indx
);
2557 if ((strict
&& ! REG_OK_FOR_INDEX_STRICT_P (indx
))
2558 || (! strict
&& ! REG_OK_FOR_INDEX_NONSTRICT_P (indx
)))
2560 ADDR_INVALID ("Index is not valid.\n", indx
);
2565 abort ();			/* scale w/o index invalid */
2567 /* Validate scale factor: */
2570 HOST_WIDE_INT value
;
2572 if (GET_CODE (scale
) != CONST_INT
)
2574 ADDR_INVALID ("Scale is not valid.\n", scale
);
2578 value
= INTVAL (scale
);
2579 if (value
!= 1 && value
!= 2 && value
!= 4 && value
!= 8)
2581 ADDR_INVALID ("Scale is not a good multiplier.\n", scale
);
2586 /* Validate displacement
2587 Constant pool addresses must be handled special.  They are
2588 considered legitimate addresses, but only if not used with regs.
2589 When printed, the output routines know to print the reference with the
2590 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2593 if (GET_CODE (disp
) == SYMBOL_REF
2594 && CONSTANT_POOL_ADDRESS_P (disp
)
2599 else if (!CONSTANT_ADDRESS_P (disp
))
2601 ADDR_INVALID ("Displacement is not valid.\n", disp
);
2605 else if (GET_CODE (disp
) == CONST_DOUBLE
)
2607 ADDR_INVALID ("Displacement is a const_double.\n", disp
);
2611 else if (flag_pic
&& SYMBOLIC_CONST (disp
)
2612 && base
!= pic_offset_table_rtx
2613 && (indx
!= pic_offset_table_rtx
|| scale
!= NULL_RTX
))
2615 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp
);
2619 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp
)
2620 && (base
!= NULL_RTX
|| indx
!= NULL_RTX
))
2622 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
2628 if (TARGET_DEBUG_ADDR
)
2629 fprintf (stderr
, "Address is valid.\n");
2631 /* Everything looks valid, return true */
2635 /* Return a legitimate reference for ORIG (an address) using the
2636 register REG. If REG is 0, a new pseudo is generated.
2638 There are three types of references that must be handled:
2640 1. Global data references must load the address from the GOT, via
2641 the PIC reg. An insn is emitted to do this load, and the reg is
2644 2. Static data references must compute the address as an offset
2645 from the GOT, whose base is in the PIC reg. An insn is emitted to
2646 compute the address into a reg, and the reg is returned. Static
2647 data objects have SYMBOL_REF_FLAG set to differentiate them from
2648 global data objects.
2650 3. Constant pool addresses must be handled special. They are
2651 considered legitimate addresses, but only if not used with regs.
2652 When printed, the output routines know to print the reference with the
2653 PIC reg, even though the PIC reg doesn't appear in the RTL.
2655 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2656 reg also appears in the address (except for constant pool references,
2659 "switch" statements also require special handling when generating
2660 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* NOTE(review): line-mangled extraction; braces, the `addr = orig` setup
   and some argument tails are missing from view.  Code left
   byte-identical.  Note: `new` as an identifier marks this as C-only
   source (would not compile as C++).

   legitimize_pic_address: rewrite address ORIG for PIC, using REG as the
   target (a fresh pseudo is made when REG is 0).  Symbol/label refs become
   either pic_reg + orig (for SYMBOL_REF_FLAG statics and labels) or a load
   from the GOT slot (mem (pic_reg + orig)) for globals, setting
   current_function_uses_pic_offset_table; constant-pool refs pass through.
   CONST/PLUS operands are handled recursively, re-associating so constant
   terms fold via plus_constant and the result stays canonical.  */
2663 legitimize_pic_address (orig
, reg
)
2670 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
2672 if (GET_CODE (addr
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (addr
))
2677 reg
= gen_reg_rtx (Pmode
);
2679 if ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FLAG (addr
))
2680 || GET_CODE (addr
) == LABEL_REF
)
2681 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, orig
);
2683 new = gen_rtx_MEM (Pmode
,
2684 gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
2687 emit_move_insn (reg
, new);
2689 current_function_uses_pic_offset_table
= 1;
2693 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
2697 if (GET_CODE (addr
) == CONST
)
2699 addr
= XEXP (addr
, 0);
2700 if (GET_CODE (addr
) != PLUS
)
2704 if (XEXP (addr
, 0) == pic_offset_table_rtx
)
2708 reg
= gen_reg_rtx (Pmode
);
2710 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2711 addr
= legitimize_pic_address (XEXP (addr
, 1),
2712 base
== reg
? NULL_RTX
: reg
);
2714 if (GET_CODE (addr
) == CONST_INT
)
2715 return plus_constant (base
, INTVAL (addr
));
2717 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
2719 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
2720 addr
= XEXP (addr
, 1);
2723 return gen_rtx_PLUS (Pmode
, base
, addr
);
/* NOTE(review): line-mangled extraction; braces and declaration lines are
   missing from view.  Code left byte-identical.

   emit_pic_move: legitimize operands[1] for PIC before a move into
   operands[0].  During reload no new pseudo may be created, so the
   destination itself serves as the temporary; a symbolic source headed for
   a MEM destination is first forced into a register (x86 cannot store a
   PIC-relocated immediate directly to memory).  */
2728 /* Emit insns to move operands[1] into operands[0]. */
2731 emit_pic_move (operands
, mode
)
2733 enum machine_mode mode
;
2735 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
2737 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2738 operands
[1] = force_reg (SImode
, operands
[1]);
2740 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2743 /* Try machine-dependent ways of modifying an illegitimate address
2744 to be legitimate. If we find one, return the new, valid address.
2745 This macro is used in only one place: `memory_address' in explow.c.
2747 OLDX is the address as it was before break_out_memory_refs was called.
2748 In some cases it is useful to look at this to decide what needs to be done.
2750 MODE and WIN are passed so that this macro can use
2751 GO_IF_LEGITIMATE_ADDRESS.
2753 It is always safe for this macro to do nothing. It exists to recognize
2754 opportunities to optimize the output.
2756 For the 80386, we handle X+REG by loading X into a register R and
2757 using R+REG. R will go in a general reg and indexing will be used.
2758 However, if REG is a broken-out memory address or multiplication,
2759 nothing needs to be done because REG can certainly go in a general reg.
2761 When -fpic is used, special handling is needed for symbolic references.
2762 See comments by legitimize_pic_address in i386.c for details. */
2765 legitimize_address (x
, oldx
, mode
)
2768 enum machine_mode mode
;
2773 if (TARGET_DEBUG_ADDR
)
2775 fprintf (stderr
, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
2776 GET_MODE_NAME (mode
));
2780 if (flag_pic
&& SYMBOLIC_CONST (x
))
2781 return legitimize_pic_address (x
, 0);
2783 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2784 if (GET_CODE (x
) == ASHIFT
2785 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2786 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (x
, 1)))) < 4)
2789 x
= gen_rtx_MULT (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2790 GEN_INT (1 << log
));
2793 if (GET_CODE (x
) == PLUS
)
2795 /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
2797 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2798 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2799 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) < 4)
2802 XEXP (x
, 0) = gen_rtx_MULT (Pmode
,
2803 force_reg (Pmode
, XEXP (XEXP (x
, 0), 0)),
2804 GEN_INT (1 << log
));
2807 if (GET_CODE (XEXP (x
, 1)) == ASHIFT
2808 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2809 && (log
= (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x
, 1), 1)))) < 4)
2812 XEXP (x
, 1) = gen_rtx_MULT (Pmode
,
2813 force_reg (Pmode
, XEXP (XEXP (x
, 1), 0)),
2814 GEN_INT (1 << log
));
2817 /* Put multiply first if it isn't already. */
2818 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2820 rtx tmp
= XEXP (x
, 0);
2821 XEXP (x
, 0) = XEXP (x
, 1);
2826 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2827 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2828 created by virtual register instantiation, register elimination, and
2829 similar optimizations. */
2830 if (GET_CODE (XEXP (x
, 0)) == MULT
&& GET_CODE (XEXP (x
, 1)) == PLUS
)
2833 x
= gen_rtx_PLUS (Pmode
,
2834 gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2835 XEXP (XEXP (x
, 1), 0)),
2836 XEXP (XEXP (x
, 1), 1));
2840 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2841 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2842 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 0)) == PLUS
2843 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == MULT
2844 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == PLUS
2845 && CONSTANT_P (XEXP (x
, 1)))
2847 rtx constant
, other
;
2849 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2851 constant
= XEXP (x
, 1);
2852 other
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2854 else if (GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 1)) == CONST_INT
)
2856 constant
= XEXP (XEXP (XEXP (x
, 0), 1), 1);
2857 other
= XEXP (x
, 1);
2865 x
= gen_rtx_PLUS (Pmode
,
2866 gen_rtx_PLUS (Pmode
, XEXP (XEXP (x
, 0), 0),
2867 XEXP (XEXP (XEXP (x
, 0), 1), 0)),
2868 plus_constant (other
, INTVAL (constant
)));
2872 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2875 if (GET_CODE (XEXP (x
, 0)) == MULT
)
2878 XEXP (x
, 0) = force_operand (XEXP (x
, 0), 0);
2881 if (GET_CODE (XEXP (x
, 1)) == MULT
)
2884 XEXP (x
, 1) = force_operand (XEXP (x
, 1), 0);
2888 && GET_CODE (XEXP (x
, 1)) == REG
2889 && GET_CODE (XEXP (x
, 0)) == REG
)
2892 if (flag_pic
&& SYMBOLIC_CONST (XEXP (x
, 1)))
2895 x
= legitimize_pic_address (x
, 0);
2898 if (changed
&& legitimate_address_p (mode
, x
, FALSE
))
2901 if (GET_CODE (XEXP (x
, 0)) == REG
)
2903 register rtx temp
= gen_reg_rtx (Pmode
);
2904 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2906 emit_move_insn (temp
, val
);
2912 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2914 register rtx temp
= gen_reg_rtx (Pmode
);
2915 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2917 emit_move_insn (temp
, val
);
2927 /* Print an integer constant expression in assembler syntax. Addition
2928 and subtraction are the only arithmetic that may appear in these
2929 expressions. FILE is the stdio stream to write to, X is the rtx, and
2930 CODE is the operand print code from the output string. */
2933 output_pic_addr_const (file
, x
, code
)
2940 switch (GET_CODE (x
))
2951 if (GET_CODE (x
) == SYMBOL_REF
)
2952 assemble_name (file
, XSTR (x
, 0));
2955 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
2956 CODE_LABEL_NUMBER (XEXP (x
, 0)));
2957 assemble_name (asm_out_file
, buf
);
2960 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
2961 fprintf (file
, "@GOTOFF(%%ebx)");
2962 else if (code
== 'P')
2963 fprintf (file
, "@PLT");
2964 else if (GET_CODE (x
) == LABEL_REF
)
2965 fprintf (file
, "@GOTOFF");
2966 else if (! SYMBOL_REF_FLAG (x
))
2967 fprintf (file
, "@GOT");
2969 fprintf (file
, "@GOTOFF");
2974 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (x
));
2975 assemble_name (asm_out_file
, buf
);
2979 fprintf (file
, "%d", INTVAL (x
));
2983 /* This used to output parentheses around the expression,
2984 but that does not work on the 386 (either ATT or BSD assembler). */
2985 output_pic_addr_const (file
, XEXP (x
, 0), code
);
2989 if (GET_MODE (x
) == VOIDmode
)
2991 /* We can use %d if the number is <32 bits and positive. */
2992 if (CONST_DOUBLE_HIGH (x
) || CONST_DOUBLE_LOW (x
) < 0)
2993 fprintf (file
, "0x%x%08x",
2994 CONST_DOUBLE_HIGH (x
), CONST_DOUBLE_LOW (x
));
2996 fprintf (file
, "%d", CONST_DOUBLE_LOW (x
));
2999 /* We can't handle floating point constants;
3000 PRINT_OPERAND must handle them. */
3001 output_operand_lossage ("floating constant misused");
3005 /* Some assemblers need integer constants to appear first. */
3006 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3008 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3009 if (INTVAL (XEXP (x
, 1)) >= 0)
3010 fprintf (file
, "+");
3011 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3015 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3016 if (INTVAL (XEXP (x
, 0)) >= 0)
3017 fprintf (file
, "+");
3018 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3023 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3024 fprintf (file
, "-");
3025 output_pic_addr_const (file
, XEXP (x
, 1), code
);
3029 output_operand_lossage ("invalid expression as operand");
3033 /* Append the correct conditional move suffix which corresponds to CODE. */
3036 put_condition_code (code
, reverse_cc
, mode
, file
)
3039 enum mode_class mode
;
3042 int ieee
= (TARGET_IEEE_FP
&& (cc_prev_status
.flags
& CC_IN_80387
)
3043 && ! (cc_prev_status
.flags
& CC_FCOMI
));
3044 if (reverse_cc
&& ! ieee
)
3045 code
= reverse_condition (code
);
3047 if (mode
== MODE_INT
)
3051 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3058 if (cc_prev_status
.flags
& CC_Z_IN_NOT_C
)
3097 output_operand_lossage ("Invalid %%C operand");
3100 else if (mode
== MODE_FLOAT
)
3104 fputs (ieee
? (reverse_cc
? "ne" : "e") : "ne", file
);
3107 fputs (ieee
? (reverse_cc
? "ne" : "e") : "e", file
);
3110 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3113 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3116 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3119 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3122 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nb", file
);
3125 fputs (ieee
? (reverse_cc
? "ne" : "e") : "nbe", file
);
3128 fputs (ieee
? (reverse_cc
? "nb" : "b") : "be", file
);
3131 fputs (ieee
? (reverse_cc
? "ne" : "e") : "b", file
);
3134 output_operand_lossage ("Invalid %%C operand");
3139 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3140 C -- print opcode suffix for set/cmov insn.
3141 c -- like C, but print reversed condition
3142 F -- print opcode suffix for fcmov insn.
3143 f -- like C, but print reversed condition
3144 R -- print the prefix for register names.
3145 z -- print the opcode suffix for the size of the current operand.
3146 * -- print a star (in certain assembler syntax)
3147 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3148 c -- don't print special prefixes before constant operands.
3149 J -- print the appropriate jump operand.
3150 s -- print a shift double count, followed by the assemblers argument
3152 b -- print the QImode name of the register for the indicated operand.
3153 %b0 would print %al if operands[0] is reg 0.
3154 w -- likewise, print the HImode name of the register.
3155 k -- likewise, print the SImode name of the register.
3156 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3157 y -- print "st(0)" instead of "st" as a register.
3158 P -- print as a PIC constant */
3161 print_operand (file
, x
, code
)
3176 PUT_OP_SIZE (code
, 'l', file
);
3180 PUT_OP_SIZE (code
, 'w', file
);
3184 PUT_OP_SIZE (code
, 'b', file
);
3188 PUT_OP_SIZE (code
, 'l', file
);
3192 PUT_OP_SIZE (code
, 's', file
);
3196 PUT_OP_SIZE (code
, 't', file
);
3200 /* 387 opcodes don't get size suffixes if the operands are
3203 if (STACK_REG_P (x
))
3206 /* this is the size of op from size of operand */
3207 switch (GET_MODE_SIZE (GET_MODE (x
)))
3210 PUT_OP_SIZE ('B', 'b', file
);
3214 PUT_OP_SIZE ('W', 'w', file
);
3218 if (GET_MODE (x
) == SFmode
)
3220 PUT_OP_SIZE ('S', 's', file
);
3224 PUT_OP_SIZE ('L', 'l', file
);
3228 PUT_OP_SIZE ('T', 't', file
);
3232 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
3234 #ifdef GAS_MNEMONICS
3235 PUT_OP_SIZE ('Q', 'q', file
);
3238 PUT_OP_SIZE ('Q', 'l', file
); /* Fall through */
3242 PUT_OP_SIZE ('Q', 'l', file
);
3255 switch (GET_CODE (x
))
3257 /* These conditions are appropriate for testing the result
3258 of an arithmetic operation, not for a compare operation.
3259 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3260 CC_Z_IN_NOT_C false and not floating point. */
3261 case NE
: fputs ("jne", file
); return;
3262 case EQ
: fputs ("je", file
); return;
3263 case GE
: fputs ("jns", file
); return;
3264 case LT
: fputs ("js", file
); return;
3265 case GEU
: fputs ("jmp", file
); return;
3266 case GTU
: fputs ("jne", file
); return;
3267 case LEU
: fputs ("je", file
); return;
3268 case LTU
: fputs ("#branch never", file
); return;
3270 /* no matching branches for GT nor LE */
3275 if (GET_CODE (x
) == CONST_INT
|| ! SHIFT_DOUBLE_OMITS_COUNT
)
3277 PRINT_OPERAND (file
, x
, 0);
3278 fputs (AS2C (,) + 1, file
);
3283 /* This is used by the conditional move instructions. */
3285 put_condition_code (GET_CODE (x
), 0, MODE_INT
, file
);
3288 /* Like above, but reverse condition */
3290 put_condition_code (GET_CODE (x
), 1, MODE_INT
, file
); return;
3293 put_condition_code (GET_CODE (x
), 0, MODE_FLOAT
, file
);
3296 /* Like above, but reverse condition */
3298 put_condition_code (GET_CODE (x
), 1, MODE_FLOAT
, file
);
3305 sprintf (str
, "invalid operand code `%c'", code
);
3306 output_operand_lossage (str
);
3311 if (GET_CODE (x
) == REG
)
3313 PRINT_REG (x
, code
, file
);
3316 else if (GET_CODE (x
) == MEM
)
3318 PRINT_PTR (x
, file
);
3319 if (CONSTANT_ADDRESS_P (XEXP (x
, 0)))
3322 output_pic_addr_const (file
, XEXP (x
, 0), code
);
3324 output_addr_const (file
, XEXP (x
, 0));
3327 output_address (XEXP (x
, 0));
3330 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
3335 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3336 REAL_VALUE_TO_TARGET_SINGLE (r
, l
);
3337 PRINT_IMMED_PREFIX (file
);
3338 fprintf (file
, "0x%x", l
);
3341 /* These float cases don't actually occur as immediate operands. */
3342 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
3347 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3348 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3349 fprintf (file
, "%s", dstr
);
3352 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == XFmode
)
3357 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3358 REAL_VALUE_TO_DECIMAL (r
, "%.22e", dstr
);
3359 fprintf (file
, "%s", dstr
);
3365 if (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
)
3366 PRINT_IMMED_PREFIX (file
);
3367 else if (GET_CODE (x
) == CONST
|| GET_CODE (x
) == SYMBOL_REF
3368 || GET_CODE (x
) == LABEL_REF
)
3369 PRINT_OFFSET_PREFIX (file
);
3372 output_pic_addr_const (file
, x
, code
);
3374 output_addr_const (file
, x
);
3378 /* Print a memory operand whose address is ADDR. */
3381 print_operand_address (file
, addr
)
3385 register rtx reg1
, reg2
, breg
, ireg
;
3388 switch (GET_CODE (addr
))
3392 fprintf (file
, "%se", RP
);
3393 fputs (hi_reg_name
[REGNO (addr
)], file
);
3403 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0)))
3405 offset
= XEXP (addr
, 0);
3406 addr
= XEXP (addr
, 1);
3408 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1)))
3410 offset
= XEXP (addr
, 1);
3411 addr
= XEXP (addr
, 0);
3414 if (GET_CODE (addr
) != PLUS
)
3416 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
3417 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3418 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
3419 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3420 else if (GET_CODE (XEXP (addr
, 0)) == REG
)
3421 reg1
= XEXP (addr
, 0), addr
= XEXP (addr
, 1);
3422 else if (GET_CODE (XEXP (addr
, 1)) == REG
)
3423 reg1
= XEXP (addr
, 1), addr
= XEXP (addr
, 0);
3425 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == MULT
)
3442 if ((reg1
&& GET_CODE (reg1
) == MULT
)
3443 || (reg2
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2
))))
3448 else if (reg1
!= 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1
)))
3454 if (ireg
!= 0 || breg
!= 0)
3461 output_pic_addr_const (file
, addr
, 0);
3462 else if (GET_CODE (addr
) == LABEL_REF
)
3463 output_asm_label (addr
);
3465 output_addr_const (file
, addr
);
3468 if (ireg
!= 0 && GET_CODE (ireg
) == MULT
)
3470 scale
= INTVAL (XEXP (ireg
, 1));
3471 ireg
= XEXP (ireg
, 0);
3474 /* The stack pointer can only appear as a base register,
3475 never an index register, so exchange the regs if it is wrong. */
3477 if (scale
== 1 && ireg
&& REGNO (ireg
) == STACK_POINTER_REGNUM
)
3486 /* output breg+ireg*scale */
3487 PRINT_B_I_S (breg
, ireg
, scale
, file
);
3495 if (GET_CODE (XEXP (addr
, 0)) == CONST_INT
)
3497 scale
= INTVAL (XEXP (addr
, 0));
3498 ireg
= XEXP (addr
, 1);
3502 scale
= INTVAL (XEXP (addr
, 1));
3503 ireg
= XEXP (addr
, 0);
3506 output_addr_const (file
, const0_rtx
);
3507 PRINT_B_I_S (NULL_RTX
, ireg
, scale
, file
);
3512 if (GET_CODE (addr
) == CONST_INT
3513 && INTVAL (addr
) < 0x8000
3514 && INTVAL (addr
) >= -0x8000)
3515 fprintf (file
, "%d", INTVAL (addr
));
3519 output_pic_addr_const (file
, addr
, 0);
3521 output_addr_const (file
, addr
);
3526 /* Set the cc_status for the results of an insn whose pattern is EXP.
3527 On the 80386, we assume that only test and compare insns, as well
3528 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3529 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3530 Also, we assume that jumps, moves and sCOND don't affect the condition
3531 codes. All else clobbers the condition codes, by assumption.
3533 We assume that ALL integer add, minus, etc. instructions effect the
3534 condition codes. This MUST be consistent with i386.md.
3536 We don't record any float test or compare - the redundant test &
3537 compare check in final.c does not handle stack-like regs correctly. */
3540 notice_update_cc (exp
)
3543 if (GET_CODE (exp
) == SET
)
3545 /* Jumps do not alter the cc's. */
3546 if (SET_DEST (exp
) == pc_rtx
)
3549 /* Moving register or memory into a register:
3550 it doesn't alter the cc's, but it might invalidate
3551 the RTX's which we remember the cc's came from.
3552 (Note that moving a constant 0 or 1 MAY set the cc's). */
3553 if (REG_P (SET_DEST (exp
))
3554 && (REG_P (SET_SRC (exp
)) || GET_CODE (SET_SRC (exp
)) == MEM
3555 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3557 if (cc_status
.value1
3558 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3559 cc_status
.value1
= 0;
3561 if (cc_status
.value2
3562 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3563 cc_status
.value2
= 0;
3568 /* Moving register into memory doesn't alter the cc's.
3569 It may invalidate the RTX's which we remember the cc's came from. */
3570 if (GET_CODE (SET_DEST (exp
)) == MEM
3571 && (REG_P (SET_SRC (exp
))
3572 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp
))) == '<'))
3574 if (cc_status
.value1
3575 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value1
))
3576 cc_status
.value1
= 0;
3577 if (cc_status
.value2
3578 && reg_overlap_mentioned_p (SET_DEST (exp
), cc_status
.value2
))
3579 cc_status
.value2
= 0;
3584 /* Function calls clobber the cc's. */
3585 else if (GET_CODE (SET_SRC (exp
)) == CALL
)
3591 /* Tests and compares set the cc's in predictable ways. */
3592 else if (SET_DEST (exp
) == cc0_rtx
)
3595 cc_status
.value1
= SET_SRC (exp
);
3599 /* Certain instructions effect the condition codes. */
3600 else if (GET_MODE (SET_SRC (exp
)) == SImode
3601 || GET_MODE (SET_SRC (exp
)) == HImode
3602 || GET_MODE (SET_SRC (exp
)) == QImode
)
3603 switch (GET_CODE (SET_SRC (exp
)))
3605 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3606 /* Shifts on the 386 don't set the condition codes if the
3607 shift count is zero. */
3608 if (GET_CODE (XEXP (SET_SRC (exp
), 1)) != CONST_INT
)
3614 /* We assume that the CONST_INT is non-zero (this rtx would
3615 have been deleted if it were zero. */
3617 case PLUS
: case MINUS
: case NEG
:
3618 case AND
: case IOR
: case XOR
:
3619 cc_status
.flags
= CC_NO_OVERFLOW
;
3620 cc_status
.value1
= SET_SRC (exp
);
3621 cc_status
.value2
= SET_DEST (exp
);
3632 else if (GET_CODE (exp
) == PARALLEL
3633 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
3635 if (SET_DEST (XVECEXP (exp
, 0, 0)) == pc_rtx
)
3637 if (SET_DEST (XVECEXP (exp
, 0, 0)) == cc0_rtx
)
3641 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp
, 0, 0))))
3643 cc_status
.flags
|= CC_IN_80387
;
3644 if (TARGET_CMOVE
&& stack_regs_mentioned_p
3645 (XEXP (SET_SRC (XVECEXP (exp
, 0, 0)), 1)))
3646 cc_status
.flags
|= CC_FCOMI
;
3649 cc_status
.value1
= SET_SRC (XVECEXP (exp
, 0, 0));
3661 /* Split one or more DImode RTL references into pairs of SImode
3662 references. The RTL can be REG, offsettable MEM, integer constant, or
3663 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3664 split and "num" is its length. lo_half and hi_half are output arrays
3665 that parallel "operands". */
3668 split_di (operands
, num
, lo_half
, hi_half
)
3671 rtx lo_half
[], hi_half
[];
3675 if (GET_CODE (operands
[num
]) == REG
)
3677 lo_half
[num
] = gen_rtx_REG (SImode
, REGNO (operands
[num
]));
3678 hi_half
[num
] = gen_rtx_REG (SImode
, REGNO (operands
[num
]) + 1);
3680 else if (CONSTANT_P (operands
[num
]))
3681 split_double (operands
[num
], &lo_half
[num
], &hi_half
[num
]);
3682 else if (offsettable_memref_p (operands
[num
]))
3684 lo_half
[num
] = operands
[num
];
3685 hi_half
[num
] = adj_offsettable_operand (operands
[num
], 4);
3692 /* Return 1 if this is a valid binary operation on a 387.
3693 OP is the expression matched, and MODE is its mode. */
3696 binary_387_op (op
, mode
)
3698 enum machine_mode mode
;
3700 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3703 switch (GET_CODE (op
))
3709 return GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
;
3716 /* Return 1 if this is a valid shift or rotate operation on a 386.
3717 OP is the expression matched, and MODE is its mode. */
3722 enum machine_mode mode
;
3724 rtx operand
= XEXP (op
, 0);
3726 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
3729 if (GET_MODE (operand
) != GET_MODE (op
)
3730 || GET_MODE_CLASS (GET_MODE (op
)) != MODE_INT
)
3733 return (GET_CODE (op
) == ASHIFT
3734 || GET_CODE (op
) == ASHIFTRT
3735 || GET_CODE (op
) == LSHIFTRT
3736 || GET_CODE (op
) == ROTATE
3737 || GET_CODE (op
) == ROTATERT
);
3740 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3741 MODE is not used. */
3744 VOIDmode_compare_op (op
, mode
)
3746 enum machine_mode mode
;
3748 return GET_CODE (op
) == COMPARE
&& GET_MODE (op
) == VOIDmode
;
3751 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3752 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3753 is the expression of the binary operation. The output may either be
3754 emitted here, or returned to the caller, like all output_* functions.
3756 There is no guarantee that the operands are the same mode, as they
3757 might be within FLOAT or FLOAT_EXTEND expressions. */
3760 output_387_binary_op (insn
, operands
)
3766 static char buf
[100];
3768 switch (GET_CODE (operands
[3]))
3771 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3772 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3779 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3780 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3787 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3788 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3795 if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_INT
3796 || GET_MODE_CLASS (GET_MODE (operands
[2])) == MODE_INT
)
3806 strcpy (buf
, base_op
);
3808 switch (GET_CODE (operands
[3]))
3812 if (REG_P (operands
[2]) && REGNO (operands
[0]) == REGNO (operands
[2]))
3815 operands
[2] = operands
[1];
3819 if (GET_CODE (operands
[2]) == MEM
)
3820 return strcat (buf
, AS1 (%z2
,%2));
3822 if (NON_STACK_REG_P (operands
[1]))
3824 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
3828 else if (NON_STACK_REG_P (operands
[2]))
3830 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3834 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3835 if (STACK_TOP_P (operands
[0]))
3836 return strcat (buf
, AS2 (p
,%0,%2));
3838 return strcat (buf
, AS2 (p
,%2,%0));
3840 if (STACK_TOP_P (operands
[0]))
3841 return strcat (buf
, AS2C (%y2
,%0));
3843 return strcat (buf
, AS2C (%2,%0));
3847 if (GET_CODE (operands
[1]) == MEM
)
3848 return strcat (buf
, AS1 (r
%z1
,%1));
3850 if (GET_CODE (operands
[2]) == MEM
)
3851 return strcat (buf
, AS1 (%z2
,%2));
3853 if (NON_STACK_REG_P (operands
[1]))
3855 output_op_from_reg (operands
[1], strcat (buf
, AS1 (r
%z0
,%1)));
3859 else if (NON_STACK_REG_P (operands
[2]))
3861 output_op_from_reg (operands
[2], strcat (buf
, AS1 (%z0
,%1)));
3865 if (! STACK_REG_P (operands
[1]) || ! STACK_REG_P (operands
[2]))
3868 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[2])))
3869 if (STACK_TOP_P (operands
[0]))
3870 return strcat (buf
, AS2 (p
,%0,%2));
3872 return strcat (buf
, AS2 (rp
,%2,%0));
3874 if (find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1])))
3875 if (STACK_TOP_P (operands
[0]))
3876 return strcat (buf
, AS2 (rp
,%0,%1));
3878 return strcat (buf
, AS2 (p
,%1,%0));
3880 if (STACK_TOP_P (operands
[0]))
3882 if (STACK_TOP_P (operands
[1]))
3883 return strcat (buf
, AS2C (%y2
,%0));
3885 return strcat (buf
, AS2 (r
,%y1
,%0));
3887 else if (STACK_TOP_P (operands
[1]))
3888 return strcat (buf
, AS2C (%1,%0));
3890 return strcat (buf
, AS2 (r
,%2,%0));
3897 /* Output code for INSN to convert a float to a signed int. OPERANDS
3898 are the insn operands. The output may be SFmode or DFmode and the
3899 input operand may be SImode or DImode. As a special case, make sure
3900 that the 387 stack top dies if the output mode is DImode, because the
3901 hardware requires this. */
3904 output_fix_trunc (insn
, operands
)
3908 int stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3911 if (! STACK_TOP_P (operands
[1])
3912 || (GET_MODE (operands
[0]) == DImode
&& ! stack_top_dies
))
3915 xops
[0] = GEN_INT (12);
3916 xops
[1] = operands
[4];
3918 output_asm_insn (AS1 (fnstc
%W2
,%2), operands
);
3919 output_asm_insn (AS2 (mov
%L2
,%2,%4), operands
);
3920 output_asm_insn (AS2 (mov
%B1
,%0,%h1
), xops
);
3921 output_asm_insn (AS2 (mov
%L4
,%4,%3), operands
);
3922 output_asm_insn (AS1 (fldc
%W3
,%3), operands
);
3924 if (NON_STACK_REG_P (operands
[0]))
3925 output_to_reg (operands
[0], stack_top_dies
, operands
[3]);
3927 else if (GET_CODE (operands
[0]) == MEM
)
3930 output_asm_insn (AS1 (fistp
%z0
,%0), operands
);
3932 output_asm_insn (AS1 (fist
%z0
,%0), operands
);
3937 return AS1 (fldc
%W2
,%2);
3940 /* Output code for INSN to compare OPERANDS. The two operands might
3941 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3942 expression. If the compare is in mode CCFPEQmode, use an opcode that
3943 will not fault if a qNaN is present. */
3946 output_float_compare (insn
, operands
)
3951 rtx body
= XVECEXP (PATTERN (insn
), 0, 0);
3952 int unordered_compare
= GET_MODE (SET_SRC (body
)) == CCFPEQmode
;
3955 if (TARGET_CMOVE
&& STACK_REG_P (operands
[1]))
3957 cc_status
.flags
|= CC_FCOMI
;
3958 cc_prev_status
.flags
&= ~CC_TEST_AX
;
3961 if (! STACK_TOP_P (operands
[0]))
3964 operands
[0] = operands
[1];
3966 cc_status
.flags
|= CC_REVERSED
;
3969 if (! STACK_TOP_P (operands
[0]))
3972 stack_top_dies
= find_regno_note (insn
, REG_DEAD
, FIRST_STACK_REG
) != 0;
3974 if (STACK_REG_P (operands
[1])
3976 && find_regno_note (insn
, REG_DEAD
, REGNO (operands
[1]))
3977 && REGNO (operands
[1]) != FIRST_STACK_REG
)
3979 /* If both the top of the 387 stack dies, and the other operand
3980 is also a stack register that dies, then this must be a
3981 `fcompp' float compare */
3983 if (unordered_compare
)
3985 if (cc_status
.flags
& CC_FCOMI
)
3987 output_asm_insn (AS2 (fucomip
,%y1
,%0), operands
);
3988 output_asm_insn (AS1 (fstp
, %y0
), operands
);
3992 output_asm_insn ("fucompp", operands
);
3996 if (cc_status
.flags
& CC_FCOMI
)
3998 output_asm_insn (AS2 (fcomip
, %y1
,%0), operands
);
3999 output_asm_insn (AS1 (fstp
, %y0
), operands
);
4003 output_asm_insn ("fcompp", operands
);
4008 static char buf
[100];
4010 /* Decide if this is the integer or float compare opcode, or the
4011 unordered float compare. */
4013 if (unordered_compare
)
4014 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fucomi" : "fucom");
4015 else if (GET_MODE_CLASS (GET_MODE (operands
[1])) == MODE_FLOAT
)
4016 strcpy (buf
, (cc_status
.flags
& CC_FCOMI
) ? "fcomi" : "fcom");
4018 strcpy (buf
, "ficom");
4020 /* Modify the opcode if the 387 stack is to be popped. */
4025 if (NON_STACK_REG_P (operands
[1]))
4026 output_op_from_reg (operands
[1], strcat (buf
, AS1 (%z0
,%1)));
4027 else if (cc_status
.flags
& CC_FCOMI
)
4029 output_asm_insn (strcat (buf
, AS2 (%z1
,%y1
,%0)), operands
);
4033 output_asm_insn (strcat (buf
, AS1 (%z1
,%y1
)), operands
);
4036 /* Now retrieve the condition code. */
4038 return output_fp_cc0_set (insn
);
4041 /* Output opcodes to transfer the results of FP compare or test INSN
4042 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4043 result of the compare or test is unordered, no comparison operator
4044 succeeds except NE. Return an output template, if any. */
4047 output_fp_cc0_set (insn
)
4051 rtx unordered_label
;
4055 xops
[0] = gen_rtx_REG (HImode
, 0);
4056 output_asm_insn (AS1 (fnsts
%W0
,%0), xops
);
4058 if (! TARGET_IEEE_FP
)
4060 if (!(cc_status
.flags
& CC_REVERSED
))
4062 next
= next_cc0_user (insn
);
4064 if (GET_CODE (next
) == JUMP_INSN
4065 && GET_CODE (PATTERN (next
)) == SET
4066 && SET_DEST (PATTERN (next
)) == pc_rtx
4067 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4068 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4069 else if (GET_CODE (PATTERN (next
)) == SET
)
4070 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4074 if (code
== GT
|| code
== LT
|| code
== EQ
|| code
== NE
4075 || code
== LE
|| code
== GE
)
4077 /* We will test eax directly. */
4078 cc_status
.flags
|= CC_TEST_AX
;
4086 next
= next_cc0_user (insn
);
4087 if (next
== NULL_RTX
)
4090 if (GET_CODE (next
) == JUMP_INSN
4091 && GET_CODE (PATTERN (next
)) == SET
4092 && SET_DEST (PATTERN (next
)) == pc_rtx
4093 && GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4094 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4095 else if (GET_CODE (PATTERN (next
)) == SET
)
4097 if (GET_CODE (SET_SRC (PATTERN (next
))) == IF_THEN_ELSE
)
4098 code
= GET_CODE (XEXP (SET_SRC (PATTERN (next
)), 0));
4100 code
= GET_CODE (SET_SRC (PATTERN (next
)));
4103 else if (GET_CODE (PATTERN (next
)) == PARALLEL
4104 && GET_CODE (XVECEXP (PATTERN (next
), 0, 0)) == SET
)
4106 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0))) == IF_THEN_ELSE
)
4107 code
= GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)), 0));
4109 code
= GET_CODE (SET_SRC (XVECEXP (PATTERN (next
), 0, 0)));
4114 xops
[0] = gen_rtx_REG (QImode
, 0);
4119 xops
[1] = GEN_INT (0x45);
4120 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4125 xops
[1] = GEN_INT (0x45);
4126 xops
[2] = GEN_INT (0x01);
4127 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4128 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4133 xops
[1] = GEN_INT (0x05);
4134 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4139 xops
[1] = GEN_INT (0x45);
4140 xops
[2] = GEN_INT (0x40);
4141 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4142 output_asm_insn (AS1 (dec
%B0
,%h0
), xops
);
4143 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4148 xops
[1] = GEN_INT (0x45);
4149 xops
[2] = GEN_INT (0x40);
4150 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4151 output_asm_insn (AS2 (cmp
%B0
,%2,%h0
), xops
);
4156 xops
[1] = GEN_INT (0x44);
4157 xops
[2] = GEN_INT (0x40);
4158 output_asm_insn (AS2 (and%B0
,%1,%h0
), xops
);
4159 output_asm_insn (AS2 (xor%B0
,%2,%h0
), xops
);
4174 #define MAX_386_STACK_LOCALS 2
4176 static rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4178 /* Define the structure for the machine field in struct function. */
4179 struct machine_function
4181 rtx i386_stack_locals
[(int) MAX_MACHINE_MODE
][MAX_386_STACK_LOCALS
];
4184 /* Functions to save and restore i386_stack_locals.
4185 These will be called, via pointer variables,
4186 from push_function_context and pop_function_context. */
4189 save_386_machine_status (p
)
4192 p
->machine
= (struct machine_function
*) xmalloc (sizeof i386_stack_locals
);
4193 bcopy ((char *) i386_stack_locals
, (char *) p
->machine
->i386_stack_locals
,
4194 sizeof i386_stack_locals
);
4198 restore_386_machine_status (p
)
4201 bcopy ((char *) p
->machine
->i386_stack_locals
, (char *) i386_stack_locals
,
4202 sizeof i386_stack_locals
);
4206 /* Clear stack slot assignments remembered from previous functions.
4207 This is called from INIT_EXPANDERS once before RTL is emitted for each
4211 clear_386_stack_locals ()
4213 enum machine_mode mode
;
4216 for (mode
= VOIDmode
; (int) mode
< (int) MAX_MACHINE_MODE
;
4217 mode
= (enum machine_mode
) ((int) mode
+ 1))
4218 for (n
= 0; n
< MAX_386_STACK_LOCALS
; n
++)
4219 i386_stack_locals
[(int) mode
][n
] = NULL_RTX
;
4221 /* Arrange to save and restore i386_stack_locals around nested functions. */
4222 save_machine_status
= save_386_machine_status
;
4223 restore_machine_status
= restore_386_machine_status
;
4226 /* Return a MEM corresponding to a stack slot with mode MODE.
4227 Allocate a new slot if necessary.
4229 The RTL for a function can have several slots available: N is
4230 which slot to use. */
4233 assign_386_stack_local (mode
, n
)
4234 enum machine_mode mode
;
4237 if (n
< 0 || n
>= MAX_386_STACK_LOCALS
)
4240 if (i386_stack_locals
[(int) mode
][n
] == NULL_RTX
)
4241 i386_stack_locals
[(int) mode
][n
]
4242 = assign_stack_local (mode
, GET_MODE_SIZE (mode
), 0);
4244 return i386_stack_locals
[(int) mode
][n
];
4249 enum machine_mode mode
;
4251 return (GET_CODE (op
) == MULT
);
4256 enum machine_mode mode
;
4258 return (GET_CODE (op
) == DIV
);
4262 /* Create a new copy of an rtx.
4263 Recursively copies the operands of the rtx,
4264 except for those few rtx codes that are sharable.
4265 Doesn't share CONST */
4273 register RTX_CODE code
;
4274 register char *format_ptr
;
4276 code
= GET_CODE (orig
);
4289 /* SCRATCH must be shared because they represent distinct values. */
4294 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4295 a LABEL_REF, it isn't sharable. */
4296 if (GET_CODE (XEXP (orig
, 0)) == PLUS
4297 && GET_CODE (XEXP (XEXP (orig
, 0), 0)) == SYMBOL_REF
4298 && GET_CODE (XEXP (XEXP (orig
, 0), 1)) == CONST_INT
)
4302 /* A MEM with a constant address is not sharable. The problem is that
4303 the constant address may need to be reloaded. If the mem is shared,
4304 then reloading one copy of this mem will cause all copies to appear
4305 to have been reloaded. */
4308 copy
= rtx_alloc (code
);
4309 PUT_MODE (copy
, GET_MODE (orig
));
4310 copy
->in_struct
= orig
->in_struct
;
4311 copy
->volatil
= orig
->volatil
;
4312 copy
->unchanging
= orig
->unchanging
;
4313 copy
->integrated
= orig
->integrated
;
4315 copy
->is_spill_rtx
= orig
->is_spill_rtx
;
4317 format_ptr
= GET_RTX_FORMAT (GET_CODE (copy
));
4319 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (copy
)); i
++)
4321 switch (*format_ptr
++)
4324 XEXP (copy
, i
) = XEXP (orig
, i
);
4325 if (XEXP (orig
, i
) != NULL
)
4326 XEXP (copy
, i
) = copy_rtx (XEXP (orig
, i
));
4331 XEXP (copy
, i
) = XEXP (orig
, i
);
4336 XVEC (copy
, i
) = XVEC (orig
, i
);
4337 if (XVEC (orig
, i
) != NULL
)
4339 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (orig
, i
));
4340 for (j
= 0; j
< XVECLEN (copy
, i
); j
++)
4341 XVECEXP (copy
, i
, j
) = copy_rtx (XVECEXP (orig
, i
, j
));
4346 XWINT (copy
, i
) = XWINT (orig
, i
);
4350 XINT (copy
, i
) = XINT (orig
, i
);
4355 XSTR (copy
, i
) = XSTR (orig
, i
);
4366 /* Try to rewrite a memory address to make it valid */
4369 rewrite_address (mem_rtx
)
4372 rtx index_rtx
, base_rtx
, offset_rtx
, scale_rtx
, ret_rtx
;
4374 int offset_adjust
= 0;
4375 int was_only_offset
= 0;
4376 rtx mem_addr
= XEXP (mem_rtx
, 0);
4377 char *storage
= oballoc (0);
4379 int is_spill_rtx
= 0;
4381 in_struct
= MEM_IN_STRUCT_P (mem_rtx
);
4382 is_spill_rtx
= RTX_IS_SPILL_P (mem_rtx
);
4384 if (GET_CODE (mem_addr
) == PLUS
4385 && GET_CODE (XEXP (mem_addr
, 1)) == PLUS
4386 && GET_CODE (XEXP (XEXP (mem_addr
, 1), 0)) == REG
)
4388 /* This part is utilized by the combiner. */
4390 = gen_rtx_PLUS (GET_MODE (mem_addr
),
4391 gen_rtx_PLUS (GET_MODE (XEXP (mem_addr
, 1)),
4393 XEXP (XEXP (mem_addr
, 1), 0)),
4394 XEXP (XEXP (mem_addr
, 1), 1));
4396 if (memory_address_p (GET_MODE (mem_rtx
), ret_rtx
))
4398 XEXP (mem_rtx
, 0) = ret_rtx
;
4399 RTX_IS_SPILL_P (ret_rtx
) = is_spill_rtx
;
4406 /* This part is utilized by loop.c.
4407 If the address contains PLUS (reg,const) and this pattern is invalid
4408 in this case - try to rewrite the address to make it valid. */
4409 storage
= oballoc (0);
4410 index_rtx
= base_rtx
= offset_rtx
= NULL
;
4412 /* Find the base index and offset elements of the memory address. */
4413 if (GET_CODE (mem_addr
) == PLUS
)
4415 if (GET_CODE (XEXP (mem_addr
, 0)) == REG
)
4417 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4418 base_rtx
= XEXP (mem_addr
, 1), index_rtx
= XEXP (mem_addr
, 0);
4420 base_rtx
= XEXP (mem_addr
, 0), offset_rtx
= XEXP (mem_addr
, 1);
4423 else if (GET_CODE (XEXP (mem_addr
, 0)) == MULT
)
4425 index_rtx
= XEXP (mem_addr
, 0);
4426 if (GET_CODE (XEXP (mem_addr
, 1)) == REG
)
4427 base_rtx
= XEXP (mem_addr
, 1);
4429 offset_rtx
= XEXP (mem_addr
, 1);
4432 else if (GET_CODE (XEXP (mem_addr
, 0)) == PLUS
)
4434 if (GET_CODE (XEXP (XEXP (mem_addr
, 0), 0)) == PLUS
4435 && GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0)) == MULT
4436 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 0))
4438 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0), 1))
4440 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1))
4442 && GET_CODE (XEXP (XEXP (mem_addr
, 0), 1)) == REG
4443 && GET_CODE (XEXP (mem_addr
, 1)) == SYMBOL_REF
)
4445 index_rtx
= XEXP (XEXP (XEXP (mem_addr
, 0), 0), 0);
4446 offset_rtx
= XEXP (mem_addr
, 1);
4447 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4448 offset_adjust
= INTVAL (XEXP (XEXP (XEXP (mem_addr
, 0), 0), 1));
4452 offset_rtx
= XEXP (mem_addr
, 1);
4453 index_rtx
= XEXP (XEXP (mem_addr
, 0), 0);
4454 base_rtx
= XEXP (XEXP (mem_addr
, 0), 1);
4458 else if (GET_CODE (XEXP (mem_addr
, 0)) == CONST_INT
)
4460 was_only_offset
= 1;
4463 offset_rtx
= XEXP (mem_addr
, 1);
4464 offset_adjust
= INTVAL (XEXP (mem_addr
, 0));
4465 if (offset_adjust
== 0)
4467 XEXP (mem_rtx
, 0) = offset_rtx
;
4468 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4478 else if (GET_CODE (mem_addr
) == MULT
)
4479 index_rtx
= mem_addr
;
4486 if (index_rtx
!= 0 && GET_CODE (index_rtx
) == MULT
)
4488 if (GET_CODE (XEXP (index_rtx
, 1)) != CONST_INT
)
4494 scale_rtx
= XEXP (index_rtx
, 1);
4495 scale
= INTVAL (scale_rtx
);
4496 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4499 /* Now find which of the elements are invalid and try to fix them. */
4500 if (index_rtx
&& GET_CODE (index_rtx
) == CONST_INT
&& base_rtx
== NULL
)
4502 offset_adjust
= INTVAL (index_rtx
) * scale
;
4504 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4505 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4506 else if (offset_rtx
== 0)
4507 offset_rtx
= const0_rtx
;
4509 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4510 XEXP (mem_rtx
, 0) = offset_rtx
;
4514 if (base_rtx
&& GET_CODE (base_rtx
) == PLUS
4515 && GET_CODE (XEXP (base_rtx
, 0)) == REG
4516 && GET_CODE (XEXP (base_rtx
, 1)) == CONST_INT
)
4518 offset_adjust
+= INTVAL (XEXP (base_rtx
, 1));
4519 base_rtx
= copy_all_rtx (XEXP (base_rtx
, 0));
4522 else if (base_rtx
&& GET_CODE (base_rtx
) == CONST_INT
)
4524 offset_adjust
+= INTVAL (base_rtx
);
4528 if (index_rtx
&& GET_CODE (index_rtx
) == PLUS
4529 && GET_CODE (XEXP (index_rtx
, 0)) == REG
4530 && GET_CODE (XEXP (index_rtx
, 1)) == CONST_INT
)
4532 offset_adjust
+= INTVAL (XEXP (index_rtx
, 1)) * scale
;
4533 index_rtx
= copy_all_rtx (XEXP (index_rtx
, 0));
4538 if (! LEGITIMATE_INDEX_P (index_rtx
)
4539 && ! (index_rtx
== stack_pointer_rtx
&& scale
== 1
4540 && base_rtx
== NULL
))
4549 if (! LEGITIMATE_INDEX_P (base_rtx
) && GET_CODE (base_rtx
) != REG
)
4556 if (offset_adjust
!= 0)
4558 if (offset_rtx
!= 0 && CONSTANT_P (offset_rtx
))
4559 offset_rtx
= plus_constant (offset_rtx
, offset_adjust
);
4561 offset_rtx
= const0_rtx
;
4569 ret_rtx
= gen_rtx_PLUS (GET_MODE (base_rtx
),
4570 gen_rtx_MULT (GET_MODE (index_rtx
),
4571 index_rtx
, scale_rtx
),
4574 if (GET_CODE (offset_rtx
) != CONST_INT
4575 || INTVAL (offset_rtx
) != 0)
4576 ret_rtx
= gen_rtx_PLUS (GET_MODE (ret_rtx
),
4577 ret_rtx
, offset_rtx
);
4581 ret_rtx
= gen_rtx_PLUS (GET_MODE (index_rtx
),
4582 index_rtx
, base_rtx
);
4584 if (GET_CODE (offset_rtx
) != CONST_INT
4585 || INTVAL (offset_rtx
) != 0)
4586 ret_rtx
= gen_rtx_PLUS (GET_MODE (ret_rtx
),
4587 ret_rtx
, offset_rtx
);
4594 ret_rtx
= gen_rtx_MULT (GET_MODE (index_rtx
),
4595 index_rtx
, scale_rtx
);
4597 if (GET_CODE (offset_rtx
) != CONST_INT
4598 || INTVAL (offset_rtx
) != 0)
4599 ret_rtx
= gen_rtx_PLUS (GET_MODE (ret_rtx
),
4600 ret_rtx
, offset_rtx
);
4604 if (GET_CODE (offset_rtx
) == CONST_INT
4605 && INTVAL (offset_rtx
) == 0)
4606 ret_rtx
= index_rtx
;
4608 ret_rtx
= gen_rtx (PLUS
, GET_MODE (index_rtx
),
4609 index_rtx
, offset_rtx
);
4617 if (GET_CODE (offset_rtx
) == CONST_INT
4618 && INTVAL (offset_rtx
) == 0)
4621 ret_rtx
= gen_rtx (PLUS
, GET_MODE (base_rtx
), base_rtx
,
4624 else if (was_only_offset
)
4625 ret_rtx
= offset_rtx
;
4633 XEXP (mem_rtx
, 0) = ret_rtx
;
4634 RTX_IS_SPILL_P (XEXP (mem_rtx
, 0)) = is_spill_rtx
;
4645 /* Return 1 if the first insn to set cc before INSN also sets the register
4646 REG_RTX; otherwise return 0. */
4648 last_to_set_cc (reg_rtx
, insn
)
4651 rtx prev_insn
= PREV_INSN (insn
);
4655 if (GET_CODE (prev_insn
) == NOTE
)
4658 else if (GET_CODE (prev_insn
) == INSN
)
4660 if (GET_CODE (PATTERN (prev_insn
)) != SET
)
4663 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn
)), reg_rtx
))
4665 if (sets_condition_code (SET_SRC (PATTERN (prev_insn
))))
4671 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn
))))
4678 prev_insn
= PREV_INSN (prev_insn
);
4685 doesnt_set_condition_code (pat
)
4688 switch (GET_CODE (pat
))
4701 sets_condition_code (pat
)
4704 switch (GET_CODE (pat
))
4726 str_immediate_operand (op
, mode
)
4728 enum machine_mode mode
;
4730 if (GET_CODE (op
) == CONST_INT
&& INTVAL (op
) <= 32 && INTVAL (op
) >= 0)
4740 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4741 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4742 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4743 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
))
4749 /* Return 1 if the mode of the SET_DEST of insn is floating point
4750 and it is not an fld or a move from memory to memory.
4751 Otherwise return 0 */
4757 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4758 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4759 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4760 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4761 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
4762 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_FLOAT_REG
4763 && GET_CODE (SET_SRC (insn
)) != MEM
)
4769 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4770 memory and the source is a register. */
4776 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SET
4777 && (GET_MODE (SET_DEST (PATTERN (insn
))) == DFmode
4778 || GET_MODE (SET_DEST (PATTERN (insn
))) == SFmode
4779 || GET_MODE (SET_DEST (PATTERN (insn
))) == XFmode
)
4780 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
4781 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
)
4787 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
4788 or index to reference memory.
4789 otherwise return 0 */
4792 agi_dependent (insn
, dep_insn
)
4795 if (GET_CODE (dep_insn
) == INSN
4796 && GET_CODE (PATTERN (dep_insn
)) == SET
4797 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == REG
)
4798 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn
)), insn
);
4800 if (GET_CODE (dep_insn
) == INSN
&& GET_CODE (PATTERN (dep_insn
)) == SET
4801 && GET_CODE (SET_DEST (PATTERN (dep_insn
))) == MEM
4802 && push_operand (SET_DEST (PATTERN (dep_insn
)),
4803 GET_MODE (SET_DEST (PATTERN (dep_insn
)))))
4804 return reg_mentioned_in_mem (stack_pointer_rtx
, insn
);
4809 /* Return 1 if reg is used in rtl as a base or index for a memory ref
4810 otherwise return 0. */
4813 reg_mentioned_in_mem (reg
, rtl
)
4818 register enum rtx_code code
;
4823 code
= GET_CODE (rtl
);
4839 if (code
== MEM
&& reg_mentioned_p (reg
, rtl
))
4842 fmt
= GET_RTX_FORMAT (code
);
4843 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4846 for (j
= XVECLEN (rtl
, i
) - 1; j
>= 0; j
--)
4847 if (reg_mentioned_in_mem (reg
, XVECEXP (rtl
, i
, j
)))
4850 else if (fmt
[i
] == 'e' && reg_mentioned_in_mem (reg
, XEXP (rtl
, i
)))
4857 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4859 operands[0] = result, initialized with the startaddress
4860 operands[1] = alignment of the address.
4861 operands[2] = scratch register, initialized with the startaddress when
4862 not aligned, otherwise undefined
4864 This is just the body. It needs the initialisations mentioned above and
4865 some address computing at the end. These things are done in i386.md. */
4868 output_strlen_unroll (operands
)
4873 xops
[0] = operands
[0]; /* Result */
4874 /* operands[1]; * Alignment */
4875 xops
[1] = operands
[2]; /* Scratch */
4876 xops
[2] = GEN_INT (0);
4877 xops
[3] = GEN_INT (2);
4878 xops
[4] = GEN_INT (3);
4879 xops
[5] = GEN_INT (4);
4880 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4881 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4882 xops
[8] = gen_label_rtx (); /* label of main loop */
4884 if (TARGET_USE_Q_REG
&& QI_REG_P (xops
[1]))
4885 xops
[9] = gen_label_rtx (); /* pentium optimisation */
4887 xops
[10] = gen_label_rtx (); /* end label 2 */
4888 xops
[11] = gen_label_rtx (); /* end label 1 */
4889 xops
[12] = gen_label_rtx (); /* end label */
4890 /* xops[13] * Temporary used */
4891 xops
[14] = GEN_INT (0xff);
4892 xops
[15] = GEN_INT (0xff00);
4893 xops
[16] = GEN_INT (0xff0000);
4894 xops
[17] = GEN_INT (0xff000000);
4896 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
4898 /* Is there a known alignment and is it less than 4? */
4899 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) < 4)
4901 /* Is there a known alignment and is it not 2? */
4902 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4904 xops
[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
4905 xops
[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
4907 /* Leave just the 3 lower bits.
4908 If this is a q-register, then the high part is used later
4909 therefore use andl rather than andb. */
4910 output_asm_insn (AS2 (and%L1
,%4,%1), xops
);
4912 /* Is aligned to 4-byte address when zero */
4913 output_asm_insn (AS1 (je
,%l8
), xops
);
4915 /* Side-effect even Parity when %eax == 3 */
4916 output_asm_insn (AS1 (jp
,%6), xops
);
4918 /* Is it aligned to 2 bytes ? */
4919 if (QI_REG_P (xops
[1]))
4920 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4922 output_asm_insn (AS2 (cmp
%L1
,%3,%1), xops
);
4924 output_asm_insn (AS1 (je
,%7), xops
);
4928 /* Since the alignment is 2, we have to check 2 or 0 bytes;
4929 check if is aligned to 4 - byte. */
4930 output_asm_insn (AS2 (and%L1
,%3,%1), xops
);
4932 /* Is aligned to 4-byte address when zero */
4933 output_asm_insn (AS1 (je
,%l8
), xops
);
4936 xops
[13] = gen_rtx (MEM
, QImode
, xops
[0]);
4938 /* Now compare the bytes; compare with the high part of a q-reg
4939 gives shorter code. */
4940 if (QI_REG_P (xops
[1]))
4942 /* Compare the first n unaligned byte on a byte per byte basis. */
4943 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4945 /* When zero we reached the end. */
4946 output_asm_insn (AS1 (je
,%l12
), xops
);
4948 /* Increment the address. */
4949 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4951 /* Not needed with an alignment of 2 */
4952 if (GET_CODE (operands
[1]) != CONST_INT
|| INTVAL (operands
[1]) != 2)
4954 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4955 CODE_LABEL_NUMBER (xops
[7]));
4956 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4957 output_asm_insn (AS1 (je
,%l12
), xops
);
4958 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4960 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4961 CODE_LABEL_NUMBER (xops
[6]));
4964 output_asm_insn (AS2 (cmp
%B1
,%h1
,%13), xops
);
4968 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4969 output_asm_insn (AS1 (je
,%l12
), xops
);
4970 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4972 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4973 CODE_LABEL_NUMBER (xops
[7]));
4974 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4975 output_asm_insn (AS1 (je
,%l12
), xops
);
4976 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4978 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
4979 CODE_LABEL_NUMBER (xops
[6]));
4980 output_asm_insn (AS2 (cmp
%B13
,%2,%13), xops
);
4983 output_asm_insn (AS1 (je
,%l12
), xops
);
4984 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
4987 /* Generate loop to check 4 bytes at a time. It is not a good idea to
4988 align this loop. It gives only huge programs, but does not help to
4990 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[8]));
4992 xops
[13] = gen_rtx (MEM
, SImode
, xops
[0]);
4993 output_asm_insn (AS2 (mov
%L1
,%13,%1), xops
);
4995 if (QI_REG_P (xops
[1]))
4997 /* On i586 it is faster to combine the hi- and lo- part as
4998 a kind of lookahead. If anding both yields zero, then one
4999 of both *could* be zero, otherwise none of both is zero;
5000 this saves one instruction, on i486 this is slower
5001 tested with P-90, i486DX2-66, AMD486DX2-66 */
5004 output_asm_insn (AS2 (test
%B1
,%h1
,%b1
), xops
);
5005 output_asm_insn (AS1 (jne
,%l9
), xops
);
5008 /* Check first byte. */
5009 output_asm_insn (AS2 (test
%B1
,%b1
,%b1
), xops
);
5010 output_asm_insn (AS1 (je
,%l12
), xops
);
5012 /* Check second byte. */
5013 output_asm_insn (AS2 (test
%B1
,%h1
,%h1
), xops
);
5014 output_asm_insn (AS1 (je
,%l11
), xops
);
5017 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L",
5018 CODE_LABEL_NUMBER (xops
[9]));
5023 /* Check first byte. */
5024 output_asm_insn (AS2 (test
%L1
,%14,%1), xops
);
5025 output_asm_insn (AS1 (je
,%l12
), xops
);
5027 /* Check second byte. */
5028 output_asm_insn (AS2 (test
%L1
,%15,%1), xops
);
5029 output_asm_insn (AS1 (je
,%l11
), xops
);
5032 /* Check third byte. */
5033 output_asm_insn (AS2 (test
%L1
,%16,%1), xops
);
5034 output_asm_insn (AS1 (je
,%l10
), xops
);
5036 /* Check fourth byte and increment address. */
5037 output_asm_insn (AS2 (add
%L0
,%5,%0), xops
);
5038 output_asm_insn (AS2 (test
%L1
,%17,%1), xops
);
5039 output_asm_insn (AS1 (jne
,%l8
), xops
);
5041 /* Now generate fixups when the compare stops within a 4-byte word. */
5042 output_asm_insn (AS2 (sub
%L0
,%4,%0), xops
);
5044 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[10]));
5045 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5047 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[11]));
5048 output_asm_insn (AS1 (inc
%L0
,%0), xops
);
5050 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file
, "L", CODE_LABEL_NUMBER (xops
[12]));