* i386.c (notice_update_cc): Remove bogus pentium GCC code.
gcc/config/i386/i386.c
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include <stdio.h>
22 #include <setjmp.h>
23 #include <ctype.h>
24 #include "config.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "tree.h"
35 #include "flags.h"
36 #include "except.h"
37 #include "function.h"
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
46 #endif
48 #ifndef CHECK_STACK_LIMIT
49 #define CHECK_STACK_LIMIT -1
50 #endif
52 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
54 reg_p,
55 mem_p,
56 imm_p
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost = { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost = { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost = {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 4, /* variable shift costs */
84 1, /* constant shift costs */
85 11, /* cost of starting a multiply */
86 0, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs pentiumpro_cost = {
91 1, /* cost of an add instruction */
92 1, /* cost of a lea instruction */
93 3, /* variable shift costs */
94 1, /* constant shift costs */
95 4, /* cost of starting a multiply */
96 0, /* cost of multiply per each bit set */
97 17 /* cost of a divide/mod */
100 struct processor_costs *ix86_cost = &pentium_cost;
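/* A worked example of how these weights read (illustrative note; the
   exact consumers are the cost macros, presumably in i386.h): on the
   i386 table above, a multiply by a constant with two bits set, such
   as 10, costs roughly 6 + 2*1 = 8 add-equivalents, while a divide
   costs 23, so expanding a division into multiplies and shifts is
   normally a win there.  */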
102 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
104 extern FILE *asm_out_file;
105 extern char *strcat ();
107 char *singlemove_string ();
108 char *output_move_const_single ();
109 char *output_fp_cc0_set ();
111 char *hi_reg_name[] = HI_REGISTER_NAMES;
112 char *qi_reg_name[] = QI_REGISTER_NAMES;
113 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
115 /* Array of the smallest class containing reg number REGNO, indexed by
116 REGNO. Used by REGNO_REG_CLASS in i386.h. */
118 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
120 /* ax, dx, cx, bx */
121 AREG, DREG, CREG, BREG,
122 /* si, di, bp, sp */
123 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
124 /* FP registers */
125 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
126 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
127 /* arg pointer */
128 INDEX_REGS
131 /* Test and compare insns in i386.md store the information needed to
132 generate branch and scc insns here. */
134 struct rtx_def *i386_compare_op0 = NULL_RTX;
135 struct rtx_def *i386_compare_op1 = NULL_RTX;
136 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
138 /* which cpu are we scheduling for */
139 enum processor_type ix86_cpu;
141 /* which instruction set architecture to use. */
142 int ix86_arch;
144 /* Strings to hold which cpu and instruction set architecture to use. */
145 char *ix86_cpu_string; /* for -mcpu=<xxx> */
146 char *ix86_arch_string; /* for -march=<xxx> */
148 /* Register allocation order */
149 char *i386_reg_alloc_order;
150 static char regs_allocated[FIRST_PSEUDO_REGISTER];
152 /* # of registers to use to pass arguments. */
153 char *i386_regparm_string; /* # registers to use to pass args */
154 int i386_regparm; /* i386_regparm_string as a number */
156 /* Alignment to use for loops and jumps */
157 char *i386_align_loops_string; /* power of two alignment for loops */
158 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
159 char *i386_align_funcs_string; /* power of two alignment for functions */
160 char *i386_branch_cost_string; /* values 1-5: see jump.c */
162 int i386_align_loops; /* power of two alignment for loops */
163 int i386_align_jumps; /* power of two alignment for non-loop jumps */
164 int i386_align_funcs; /* power of two alignment for functions */
165 int i386_branch_cost; /* values 1-5: see jump.c */
167 /* Sometimes certain combinations of command options do not make
168 sense on a particular target machine. You can define a macro
169 `OVERRIDE_OPTIONS' to take account of this. This macro, if
170 defined, is executed once just after all the command options have
171 been parsed.
173 Don't use this macro to turn on various extra optimizations for
174 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
176 void
177 override_options ()
179 int ch, i, j, regno;
180 char *p;
181 int def_align;
183 static struct ptt
185 char *name; /* Canonical processor name. */
186 enum processor_type processor; /* Processor type enum value. */
187 struct processor_costs *cost; /* Processor costs */
188 int target_enable; /* Target flags to enable. */
189 int target_disable; /* Target flags to disable. */
190 } processor_target_table[]
191 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
192 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
193 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
194 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
195 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost, 0, 0},
196 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost, 0, 0}};
198 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
200 #ifdef SUBTARGET_OVERRIDE_OPTIONS
201 SUBTARGET_OVERRIDE_OPTIONS;
202 #endif
204 /* Validate registers in register allocation order */
205 if (i386_reg_alloc_order)
207 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
209 switch (ch)
211 case 'a': regno = 0; break;
212 case 'd': regno = 1; break;
213 case 'c': regno = 2; break;
214 case 'b': regno = 3; break;
215 case 'S': regno = 4; break;
216 case 'D': regno = 5; break;
217 case 'B': regno = 6; break;
219 default: fatal ("Register '%c' is unknown", ch);
222 if (regs_allocated[regno])
223 fatal ("Register '%c' was already specified in the allocation order", ch);
225 regs_allocated[regno] = 1;
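/* Illustrative example (the option spelling comes from the
   TARGET_OPTIONS table in i386.h and is assumed here): an order string
   such as "acd", e.g. from -mreg-alloc=acd, asks for %eax, %ecx and
   %edx to be allocated first, in that order; order_regs_for_local_alloc
   below appends the remaining registers in their natural order.  */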
229 if (ix86_arch_string == (char *)0)
231 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
232 if (ix86_cpu_string == (char *)0)
233 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
236 for (i = 0; i < ptt_size; i++)
237 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
239 ix86_arch = processor_target_table[i].processor;
240 if (ix86_cpu_string == (char *)0)
241 ix86_cpu_string = processor_target_table[i].name;
242 break;
245 if (i == ptt_size)
247 error ("bad value (%s) for -march= switch", ix86_arch_string);
248 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
249 ix86_arch = PROCESSOR_DEFAULT;
252 if (ix86_cpu_string == (char *)0)
253 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
255 for (j = 0; j < ptt_size; j++)
256 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
258 ix86_cpu = processor_target_table[j].processor;
259 ix86_cost = processor_target_table[j].cost;
260 if (i > j && (int)ix86_arch >= (int)PROCESSOR_PENTIUMPRO)
261 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_arch_string);
263 target_flags |= processor_target_table[j].target_enable;
264 target_flags &= ~processor_target_table[j].target_disable;
265 break;
268 if (j == ptt_size)
270 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
271 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
272 ix86_cpu = PROCESSOR_DEFAULT;
274 /* Validate -mregparm= value */
275 if (i386_regparm_string)
277 i386_regparm = atoi (i386_regparm_string);
278 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
279 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
282 /* The 486 suffers more from non-aligned cache line fills, and the larger code
283 size results in a larger cache foot-print and more misses. The 486 has a
284 16 byte cache line, pentium and pentiumpro have a 32 byte cache line */
285 def_align = (TARGET_486) ? 4 : 2;
287 /* Validate -malign-loops= value, or provide default */
288 if (i386_align_loops_string)
290 i386_align_loops = atoi (i386_align_loops_string);
291 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
292 fatal ("-malign-loops=%d is not between 0 and %d",
293 i386_align_loops, MAX_CODE_ALIGN);
295 else
296 i386_align_loops = 2;
298 /* Validate -malign-jumps= value, or provide default */
299 if (i386_align_jumps_string)
301 i386_align_jumps = atoi (i386_align_jumps_string);
302 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
303 fatal ("-malign-jumps=%d is not between 0 and %d",
304 i386_align_jumps, MAX_CODE_ALIGN);
306 else
307 i386_align_jumps = def_align;
309 /* Validate -malign-functions= value, or provide default */
310 if (i386_align_funcs_string)
312 i386_align_funcs = atoi (i386_align_funcs_string);
313 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
314 fatal ("-malign-functions=%d is not between 0 and %d",
315 i386_align_funcs, MAX_CODE_ALIGN);
317 else
318 i386_align_funcs = def_align;
320 /* Validate -mbranch-cost= value, or provide default */
321 if (i386_branch_cost_string)
323 i386_branch_cost = atoi (i386_branch_cost_string);
324 if (i386_branch_cost < 0 || i386_branch_cost > 5)
325 fatal ("-mbranch-cost=%d is not between 0 and 5",
326 i386_branch_cost);
328 else
329 i386_branch_cost = 1;
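/* The alignment values are log2 amounts: -malign-jumps=4, for example,
   requests 2**4 = 16 byte alignment, one 486 cache line as noted
   above, while the def_align default of 2 (4 on the 486) means 4
   (respectively 16) byte alignment.  i386_branch_cost just feeds the
   branch-cost heuristics in jump.c, per the comment above.  */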
331 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
332 flag_omit_frame_pointer = 1;
334 /* pic references don't explicitly mention pic_offset_table_rtx */
335 /* code threaded into the prologue may conflict with profiling */
336 if (flag_pic || profile_flag || profile_block_flag)
337 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
340 /* A C statement (sans semicolon) to choose the order in which to
341 allocate hard registers for pseudo-registers local to a basic
342 block.
344 Store the desired register order in the array `reg_alloc_order'.
345 Element 0 should be the register to allocate first; element 1, the
346 next register; and so on.
348 The macro body should not assume anything about the contents of
349 `reg_alloc_order' before execution of the macro.
351 On most machines, it is not necessary to define this macro. */
353 void
354 order_regs_for_local_alloc ()
356 int i, ch, order, regno;
358 /* User specified the register allocation order */
359 if (i386_reg_alloc_order)
361 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
363 switch (ch)
365 case 'a': regno = 0; break;
366 case 'd': regno = 1; break;
367 case 'c': regno = 2; break;
368 case 'b': regno = 3; break;
369 case 'S': regno = 4; break;
370 case 'D': regno = 5; break;
371 case 'B': regno = 6; break;
374 reg_alloc_order[order++] = regno;
377 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
379 if (!regs_allocated[i])
380 reg_alloc_order[order++] = i;
384 /* If users did not specify a register allocation order, use natural order */
385 else
387 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
388 reg_alloc_order[i] = i;
393 void
394 optimization_options (level)
395 int level;
397 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
398 make the register shortage problem even worse. */
399 #ifdef INSN_SCHEDULING
400 if (level > 1)
401 flag_schedule_insns = 0;
402 #endif
405 /* Sign-extend a 16-bit constant */
407 struct rtx_def *
408 i386_sext16_if_const (op)
409 struct rtx_def *op;
411 if (GET_CODE (op) == CONST_INT)
413 HOST_WIDE_INT val = INTVAL (op);
414 HOST_WIDE_INT sext_val;
415 if (val & 0x8000)
416 sext_val = val | ~0xffff;
417 else
418 sext_val = val & 0xffff;
419 if (sext_val != val)
420 op = GEN_INT (sext_val);
422 return op;
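/* Worked example: (const_int 0xffff) comes back as (const_int -1) and
   (const_int 0x8000) as (const_int -32768), while (const_int 0x7fff),
   or anything that is not a CONST_INT, is returned unchanged.  */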
425 /* Return nonzero if the rtx is aligned */
427 static int
428 i386_aligned_reg_p (regno)
429 int regno;
431 return (regno == STACK_POINTER_REGNUM
432 || (!flag_omit_frame_pointer
433 && regno == FRAME_POINTER_REGNUM));
437 i386_aligned_p (op)
438 rtx op;
440 /* registers and immediate operands are always "aligned" */
441 if (GET_CODE (op) != MEM)
442 return 1;
444 /* Don't even try to do any aligned optimizations with volatiles */
445 if (MEM_VOLATILE_P (op))
446 return 0;
448 /* Get address of memory operand */
449 op = XEXP (op, 0);
451 switch (GET_CODE (op))
453 case CONST_INT:
454 if (INTVAL (op) & 3)
455 break;
456 return 1;
458 /* match "reg + offset" */
459 case PLUS:
460 if (GET_CODE (XEXP (op, 1)) != CONST_INT)
461 break;
462 if (INTVAL (XEXP (op, 1)) & 3)
463 break;
464 op = XEXP (op, 0);
465 if (GET_CODE (op) != REG)
466 break;
467 /* fall through */
468 case REG:
469 return i386_aligned_reg_p (REGNO (op));
471 return 0;
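/* Illustrative cases: (mem:SI (reg:SI sp)) and
   (mem:SI (plus:SI (reg:SI bp) (const_int 8))) count as aligned when
   the frame pointer is being kept, since both registers are assumed to
   hold 4-byte-aligned values; the same address with offset 6, or any
   volatile MEM, does not.  */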
474 /* Return nonzero if INSN looks like it won't compute useful cc bits
475 as a side effect. This information is only a hint. */
478 i386_cc_probably_useless_p (insn)
479 rtx insn;
481 return !next_cc0_user (insn);
484 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
485 attribute for DECL. The attributes in ATTRIBUTES have previously been
486 assigned to DECL. */
489 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
490 tree decl;
491 tree attributes;
492 tree identifier;
493 tree args;
495 return 0;
498 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
499 attribute for TYPE. The attributes in ATTRIBUTES have previously been
500 assigned to TYPE. */
503 i386_valid_type_attribute_p (type, attributes, identifier, args)
504 tree type;
505 tree attributes;
506 tree identifier;
507 tree args;
509 if (TREE_CODE (type) != FUNCTION_TYPE
510 && TREE_CODE (type) != FIELD_DECL
511 && TREE_CODE (type) != TYPE_DECL)
512 return 0;
514 /* Stdcall attribute says callee is responsible for popping arguments
515 if they are not variable. */
516 if (is_attribute_p ("stdcall", identifier))
517 return (args == NULL_TREE);
519 /* Cdecl attribute says the callee is a normal C declaration */
520 if (is_attribute_p ("cdecl", identifier))
521 return (args == NULL_TREE);
523 /* Regparm attribute specifies how many integer arguments are to be
524 passed in registers */
525 if (is_attribute_p ("regparm", identifier))
527 tree cst;
529 if (!args || TREE_CODE (args) != TREE_LIST
530 || TREE_CHAIN (args) != NULL_TREE
531 || TREE_VALUE (args) == NULL_TREE)
532 return 0;
534 cst = TREE_VALUE (args);
535 if (TREE_CODE (cst) != INTEGER_CST)
536 return 0;
538 if (TREE_INT_CST_HIGH (cst) != 0
539 || TREE_INT_CST_LOW (cst) < 0
540 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
541 return 0;
543 return 1;
546 return 0;
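/* Example declarations that pass the checks above (illustrative only,
   using GNU C attribute syntax):

     int add2 (int a, int b) __attribute__ ((regparm (2)));
     int wincall (int a, int b) __attribute__ ((stdcall));

   regparm wants exactly one integer constant in [0, REGPARM_MAX];
   stdcall and cdecl take no arguments.  */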
549 /* Return 0 if the attributes for two types are incompatible, 1 if they
550 are compatible, and 2 if they are nearly compatible (which causes a
551 warning to be generated). */
554 i386_comp_type_attributes (type1, type2)
555 tree type1;
556 tree type2;
558 return 1;
562 /* Value is the number of bytes of arguments automatically
563 popped when returning from a subroutine call.
564 FUNDECL is the declaration node of the function (as a tree),
565 FUNTYPE is the data type of the function (as a tree),
566 or for a library call it is an identifier node for the subroutine name.
567 SIZE is the number of bytes of arguments passed on the stack.
569 On the 80386, the RTD insn may be used to pop them if the number
570 of args is fixed, but if the number is variable then the caller
571 must pop them all. RTD can't be used for library calls now
572 because the library is compiled with the Unix compiler.
573 Use of RTD is a selectable option, since it is incompatible with
574 standard Unix calling sequences. If the option is not selected,
575 the caller must always pop the args.
577 The attribute stdcall is equivalent to RTD on a per module basis. */
580 i386_return_pops_args (fundecl, funtype, size)
581 tree fundecl;
582 tree funtype;
583 int size;
585 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
587 /* Cdecl functions override -mrtd, and never pop the stack */
588 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
590 /* Stdcall functions will pop the stack if not variable args */
591 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
592 rtd = 1;
594 if (rtd
595 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
596 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
597 return size;
600 /* Lose any fake structure return argument */
601 if (aggregate_value_p (TREE_TYPE (funtype)))
602 return GET_MODE_SIZE (Pmode);
604 return 0;
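/* Worked example: with -mrtd (or a stdcall attribute) and a fixed
   prototype such as int f (int, int), the routine above returns 8, so
   the callee pops its two argument words with `ret $8'; a stdarg
   prototype, or a cdecl attribute, makes it return 0 and leaves the
   popping to the caller.  */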
608 /* Argument support functions. */
610 /* Initialize a variable CUM of type CUMULATIVE_ARGS
611 for a call to a function whose data type is FNTYPE.
612 For a library call, FNTYPE is 0. */
614 void
615 init_cumulative_args (cum, fntype, libname)
616 CUMULATIVE_ARGS *cum; /* argument info to initialize */
617 tree fntype; /* tree ptr for function decl */
618 rtx libname; /* SYMBOL_REF of library name or 0 */
620 static CUMULATIVE_ARGS zero_cum;
621 tree param, next_param;
623 if (TARGET_DEBUG_ARG)
625 fprintf (stderr, "\ninit_cumulative_args (");
626 if (fntype)
628 tree ret_type = TREE_TYPE (fntype);
629 fprintf (stderr, "fntype code = %s, ret code = %s",
630 tree_code_name[ (int)TREE_CODE (fntype) ],
631 tree_code_name[ (int)TREE_CODE (ret_type) ]);
633 else
634 fprintf (stderr, "no fntype");
636 if (libname)
637 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
640 *cum = zero_cum;
642 /* Set up the number of registers to use for passing arguments. */
643 cum->nregs = i386_regparm;
644 if (fntype)
646 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
647 if (attr)
648 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
651 /* Determine if this function has variable arguments. This is
652 indicated by the last argument being 'void_type_node' if there
653 are no variable arguments. If there are variable arguments, then
654 we won't pass anything in registers */
656 if (cum->nregs)
658 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
659 param != (tree)0;
660 param = next_param)
662 next_param = TREE_CHAIN (param);
663 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
664 cum->nregs = 0;
668 if (TARGET_DEBUG_ARG)
669 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
671 return;
674 /* Update the data in CUM to advance over an argument
675 of mode MODE and data type TYPE.
676 (TYPE is null for libcalls where that information may not be available.) */
678 void
679 function_arg_advance (cum, mode, type, named)
680 CUMULATIVE_ARGS *cum; /* current arg information */
681 enum machine_mode mode; /* current arg mode */
682 tree type; /* type of the argument or 0 if lib support */
683 int named; /* whether or not the argument was named */
685 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
686 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
688 if (TARGET_DEBUG_ARG)
689 fprintf (stderr,
690 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
691 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
693 cum->words += words;
694 cum->nregs -= words;
695 cum->regno += words;
697 if (cum->nregs <= 0)
699 cum->nregs = 0;
700 cum->regno = 0;
703 return;
706 /* Define where to put the arguments to a function.
707 Value is zero to push the argument on the stack,
708 or a hard register in which to store the argument.
710 MODE is the argument's machine mode.
711 TYPE is the data type of the argument (as a tree).
712 This is null for libcalls where that information may
713 not be available.
714 CUM is a variable of type CUMULATIVE_ARGS which gives info about
715 the preceding args and about the function being called.
716 NAMED is nonzero if this argument is a named parameter
717 (otherwise it is an extra parameter matching an ellipsis). */
719 struct rtx_def *
720 function_arg (cum, mode, type, named)
721 CUMULATIVE_ARGS *cum; /* current arg information */
722 enum machine_mode mode; /* current arg mode */
723 tree type; /* type of the argument or 0 if lib support */
724 int named; /* != 0 for normal args, == 0 for ... args */
726 rtx ret = NULL_RTX;
727 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
728 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
730 switch (mode)
732 default: /* for now, pass fp/complex values on the stack */
733 break;
735 case BLKmode:
736 case DImode:
737 case SImode:
738 case HImode:
739 case QImode:
740 if (words <= cum->nregs)
741 ret = gen_rtx (REG, mode, cum->regno);
742 break;
745 if (TARGET_DEBUG_ARG)
747 fprintf (stderr,
748 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
749 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
751 if (ret)
752 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
753 else
754 fprintf (stderr, ", stack");
756 fprintf (stderr, " )\n");
759 return ret;
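/* Worked example: with cum->nregs == 3 (e.g. regparm (3) or
   -mregparm=3), successive SImode arguments are handed out as hard
   registers 0, 1 and 2, i.e. %eax, %edx and %ecx; a DImode argument
   takes a register pair, and FP or complex values always go to the
   stack, per the switch above.  */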
762 /* For an arg passed partly in registers and partly in memory,
763 this is the number of registers used.
764 For args passed entirely in registers or entirely in memory, zero. */
767 function_arg_partial_nregs (cum, mode, type, named)
768 CUMULATIVE_ARGS *cum; /* current arg information */
769 enum machine_mode mode; /* current arg mode */
770 tree type; /* type of the argument or 0 if lib support */
771 int named; /* != 0 for normal args, == 0 for ... args */
773 return 0;
777 /* Output an insn whose source is a 386 integer register. SRC is the
778 rtx for the register, and TEMPLATE is the op-code template. SRC may
779 be either SImode or DImode.
781 The template will be output with operands[0] as SRC, and operands[1]
782 as a pointer to the top of the 386 stack. So a call from floatsidf2
783 would look like this:
785 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
787 where %z0 corresponds to the caller's operands[1], and is used to
788 emit the proper size suffix.
790 ??? Extend this to handle HImode - a 387 can load and store HImode
791 values directly. */
793 void
794 output_op_from_reg (src, template)
795 rtx src;
796 char *template;
798 rtx xops[4];
799 int size = GET_MODE_SIZE (GET_MODE (src));
801 xops[0] = src;
802 xops[1] = AT_SP (Pmode);
803 xops[2] = GEN_INT (size);
804 xops[3] = stack_pointer_rtx;
806 if (size > UNITS_PER_WORD)
808 rtx high;
809 if (size > 2 * UNITS_PER_WORD)
811 high = gen_rtx (REG, SImode, REGNO (src) + 2);
812 output_asm_insn (AS1 (push%L0,%0), &high);
814 high = gen_rtx (REG, SImode, REGNO (src) + 1);
815 output_asm_insn (AS1 (push%L0,%0), &high);
817 output_asm_insn (AS1 (push%L0,%0), &src);
819 output_asm_insn (template, xops);
821 output_asm_insn (AS2 (add%L3,%2,%3), xops);
824 /* Output an insn to pop a value from the 387 top-of-stack to 386
825 register DEST. The 387 register stack is popped if DIES is true. If
826 the mode of DEST is an integer mode, a `fist' integer store is done,
827 otherwise a `fst' float store is done. */
829 void
830 output_to_reg (dest, dies, scratch_mem)
831 rtx dest;
832 int dies;
833 rtx scratch_mem;
835 rtx xops[4];
836 int size = GET_MODE_SIZE (GET_MODE (dest));
838 if (! scratch_mem)
839 xops[0] = AT_SP (Pmode);
840 else
841 xops[0] = scratch_mem;
842 xops[1] = stack_pointer_rtx;
843 xops[2] = GEN_INT (size);
844 xops[3] = dest;
846 if (! scratch_mem)
847 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
849 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
851 if (dies)
852 output_asm_insn (AS1 (fistp%z3,%y0), xops);
853 else
854 output_asm_insn (AS1 (fist%z3,%y0), xops);
856 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
858 if (dies)
859 output_asm_insn (AS1 (fstp%z3,%y0), xops);
860 else
862 if (GET_MODE (dest) == XFmode)
864 output_asm_insn (AS1 (fstp%z3,%y0), xops);
865 output_asm_insn (AS1 (fld%z3,%y0), xops);
867 else
868 output_asm_insn (AS1 (fst%z3,%y0), xops);
871 else
872 abort ();
874 if (! scratch_mem)
875 output_asm_insn (AS1 (pop%L0,%0), &dest);
876 else
877 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
880 if (size > UNITS_PER_WORD)
882 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
883 if (! scratch_mem)
884 output_asm_insn (AS1 (pop%L0,%0), &dest);
885 else
887 xops[0] = adj_offsettable_operand (xops[0], 4);
888 xops[3] = dest;
889 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
891 if (size > 2 * UNITS_PER_WORD)
893 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
894 if (! scratch_mem)
895 output_asm_insn (AS1 (pop%L0,%0), &dest);
896 else
898 xops[0] = adj_offsettable_operand (xops[0], 4);
899 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
905 char *
906 singlemove_string (operands)
907 rtx *operands;
909 rtx x;
910 if (GET_CODE (operands[0]) == MEM
911 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
913 if (XEXP (x, 0) != stack_pointer_rtx)
914 abort ();
915 return "push%L1 %1";
917 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
919 return output_move_const_single (operands);
921 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
922 return AS2 (mov%L0,%1,%0);
923 else if (CONSTANT_P (operands[1]))
924 return AS2 (mov%L0,%1,%0);
925 else
927 output_asm_insn ("push%L1 %1", operands);
928 return "pop%L0 %0";
932 /* Return a REG that occurs in ADDR with coefficient 1.
933 ADDR can be effectively incremented by incrementing REG. */
935 static rtx
936 find_addr_reg (addr)
937 rtx addr;
939 while (GET_CODE (addr) == PLUS)
941 if (GET_CODE (XEXP (addr, 0)) == REG)
942 addr = XEXP (addr, 0);
943 else if (GET_CODE (XEXP (addr, 1)) == REG)
944 addr = XEXP (addr, 1);
945 else if (CONSTANT_P (XEXP (addr, 0)))
946 addr = XEXP (addr, 1);
947 else if (CONSTANT_P (XEXP (addr, 1)))
948 addr = XEXP (addr, 0);
949 else
950 abort ();
952 if (GET_CODE (addr) == REG)
953 return addr;
954 abort ();
958 /* Output an insn to add the constant N to the register X. */
960 static void
961 asm_add (n, x)
962 int n;
963 rtx x;
965 rtx xops[2];
966 xops[0] = x;
968 if (n == -1)
969 output_asm_insn (AS1 (dec%L0,%0), xops);
970 else if (n == 1)
971 output_asm_insn (AS1 (inc%L0,%0), xops);
972 else if (n < 0 || n == 128)
974 xops[1] = GEN_INT (-n);
975 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
977 else if (n > 0)
979 xops[1] = GEN_INT (n);
980 output_asm_insn (AS2 (add%L0,%1,%0), xops);
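/* Note on the n == 128 special case above: x86 has no sign-extended
   8-bit immediate form for add $128, but sub $-128 does fit in an
   imm8 and computes the same thing, so it gives a shorter encoding.  */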
985 /* Output assembler code to perform a doubleword move insn
986 with operands OPERANDS. */
988 char *
989 output_move_double (operands)
990 rtx *operands;
992 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
993 rtx latehalf[2];
994 rtx middlehalf[2];
995 rtx xops[2];
996 rtx addreg0 = 0, addreg1 = 0;
997 int dest_overlapped_low = 0;
998 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
1000 middlehalf[0] = 0;
1001 middlehalf[1] = 0;
1003 /* First classify both operands. */
1005 if (REG_P (operands[0]))
1006 optype0 = REGOP;
1007 else if (offsettable_memref_p (operands[0]))
1008 optype0 = OFFSOP;
1009 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
1010 optype0 = POPOP;
1011 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1012 optype0 = PUSHOP;
1013 else if (GET_CODE (operands[0]) == MEM)
1014 optype0 = MEMOP;
1015 else
1016 optype0 = RNDOP;
1018 if (REG_P (operands[1]))
1019 optype1 = REGOP;
1020 else if (CONSTANT_P (operands[1]))
1021 optype1 = CNSTOP;
1022 else if (offsettable_memref_p (operands[1]))
1023 optype1 = OFFSOP;
1024 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
1025 optype1 = POPOP;
1026 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
1027 optype1 = PUSHOP;
1028 else if (GET_CODE (operands[1]) == MEM)
1029 optype1 = MEMOP;
1030 else
1031 optype1 = RNDOP;
1033 /* Check for the cases that the operand constraints are not
1034 supposed to allow to happen. Abort if we get one,
1035 because generating code for these cases is painful. */
1037 if (optype0 == RNDOP || optype1 == RNDOP)
1038 abort ();
1040 /* If one operand is decrementing and one is incrementing
1041 decrement the former register explicitly
1042 and change that operand into ordinary indexing. */
1044 if (optype0 == PUSHOP && optype1 == POPOP)
1046 /* ??? Can this ever happen on i386? */
1047 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1048 asm_add (-size, operands[0]);
1049 if (GET_MODE (operands[1]) == XFmode)
1050 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
1051 else if (GET_MODE (operands[0]) == DFmode)
1052 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
1053 else
1054 operands[0] = gen_rtx (MEM, DImode, operands[0]);
1055 optype0 = OFFSOP;
1058 if (optype0 == POPOP && optype1 == PUSHOP)
1060 /* ??? Can this ever happen on i386? */
1061 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1062 asm_add (-size, operands[1]);
1063 if (GET_MODE (operands[1]) == XFmode)
1064 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
1065 else if (GET_MODE (operands[1]) == DFmode)
1066 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
1067 else
1068 operands[1] = gen_rtx (MEM, DImode, operands[1]);
1069 optype1 = OFFSOP;
1072 /* If an operand is an unoffsettable memory ref, find a register
1073 we can increment temporarily to make it refer to the second word. */
1075 if (optype0 == MEMOP)
1076 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1078 if (optype1 == MEMOP)
1079 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1081 /* Ok, we can do one word at a time.
1082 Normally we do the low-numbered word first,
1083 but if either operand is autodecrementing then we
1084 do the high-numbered word first.
1086 In either case, set up in LATEHALF the operands to use
1087 for the high-numbered word and in some cases alter the
1088 operands in OPERANDS to be suitable for the low-numbered word. */
1090 if (size == 12)
1092 if (optype0 == REGOP)
1094 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1095 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
1097 else if (optype0 == OFFSOP)
1099 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
1100 latehalf[0] = adj_offsettable_operand (operands[0], 8);
1102 else
1104 middlehalf[0] = operands[0];
1105 latehalf[0] = operands[0];
1108 if (optype1 == REGOP)
1110 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1111 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
1113 else if (optype1 == OFFSOP)
1115 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1116 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1118 else if (optype1 == CNSTOP)
1120 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1122 REAL_VALUE_TYPE r; long l[3];
1124 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1125 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1126 operands[1] = GEN_INT (l[0]);
1127 middlehalf[1] = GEN_INT (l[1]);
1128 latehalf[1] = GEN_INT (l[2]);
1130 else if (CONSTANT_P (operands[1]))
1131 /* No non-CONST_DOUBLE constant should ever appear here. */
1132 abort ();
1134 else
1136 middlehalf[1] = operands[1];
1137 latehalf[1] = operands[1];
1140 else /* size is not 12: */
1142 if (optype0 == REGOP)
1143 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1144 else if (optype0 == OFFSOP)
1145 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1146 else
1147 latehalf[0] = operands[0];
1149 if (optype1 == REGOP)
1150 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1151 else if (optype1 == OFFSOP)
1152 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1153 else if (optype1 == CNSTOP)
1154 split_double (operands[1], &operands[1], &latehalf[1]);
1155 else
1156 latehalf[1] = operands[1];
1159 /* If insn is effectively movd N (sp),-(sp) then we will do the
1160 high word first. We should use the adjusted operand 1
1161 (which is N+4 (sp) or N+8 (sp))
1162 for the low word and middle word as well,
1163 to compensate for the first decrement of sp. */
1164 if (optype0 == PUSHOP
1165 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1166 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1167 middlehalf[1] = operands[1] = latehalf[1];
1169 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1170 if the upper part of reg N does not appear in the MEM, arrange to
1171 emit the move late-half first. Otherwise, compute the MEM address
1172 into the upper part of N and use that as a pointer to the memory
1173 operand. */
1174 if (optype0 == REGOP
1175 && (optype1 == OFFSOP || optype1 == MEMOP))
1177 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1178 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1180 /* If both halves of dest are used in the src memory address,
1181 compute the address into latehalf of dest. */
1182 compadr:
1183 xops[0] = latehalf[0];
1184 xops[1] = XEXP (operands[1], 0);
1185 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1186 if( GET_MODE (operands[1]) == XFmode )
1188 /* abort (); */
1189 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1190 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1191 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1193 else
1195 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1196 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1199 else if (size == 12
1200 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1202 /* Check for two regs used by both source and dest. */
1203 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1204 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1205 goto compadr;
1207 /* JRV says this can't happen: */
1208 if (addreg0 || addreg1)
1209 abort();
1211 /* Only the middle reg conflicts; simply put it last. */
1212 output_asm_insn (singlemove_string (operands), operands);
1213 output_asm_insn (singlemove_string (latehalf), latehalf);
1214 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1215 return "";
1217 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1218 /* If the low half of dest is mentioned in the source memory
1219 address, then arrange to emit the move late-half first. */
1220 dest_overlapped_low = 1;
1223 /* If one or both operands autodecrementing,
1224 do the two words, high-numbered first. */
1226 /* Likewise, if the first move would clobber the source of the second one,
1227 do them in the other order. This happens only for registers;
1228 such overlap can't happen in memory unless the user explicitly
1229 sets it up, and that is an undefined circumstance. */
1232 if (optype0 == PUSHOP || optype1 == PUSHOP
1233 || (optype0 == REGOP && optype1 == REGOP
1234 && REGNO (operands[0]) == REGNO (latehalf[1]))
1235 || dest_overlapped_low)
1237 if (optype0 == PUSHOP || optype1 == PUSHOP
1238 || (optype0 == REGOP && optype1 == REGOP
1239 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1240 || REGNO (operands[0]) == REGNO (latehalf[1])))
1241 || dest_overlapped_low)
1243 /* Make any unoffsettable addresses point at high-numbered word. */
1244 if (addreg0)
1245 asm_add (size-4, addreg0);
1246 if (addreg1)
1247 asm_add (size-4, addreg1);
1249 /* Do that word. */
1250 output_asm_insn (singlemove_string (latehalf), latehalf);
1252 /* Undo the adds we just did. */
1253 if (addreg0)
1254 asm_add (-4, addreg0);
1255 if (addreg1)
1256 asm_add (-4, addreg1);
1258 if (size == 12)
1260 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1261 if (addreg0)
1262 asm_add (-4, addreg0);
1263 if (addreg1)
1264 asm_add (-4, addreg1);
1267 /* Do low-numbered word. */
1268 return singlemove_string (operands);
1271 /* Normal case: do the two words, low-numbered first. */
1273 output_asm_insn (singlemove_string (operands), operands);
1275 /* Do the middle one of the three words for long double */
1276 if (size == 12)
1278 if (addreg0)
1279 asm_add (4, addreg0);
1280 if (addreg1)
1281 asm_add (4, addreg1);
1283 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1286 /* Make any unoffsettable addresses point at high-numbered word. */
1287 if (addreg0)
1288 asm_add (4, addreg0);
1289 if (addreg1)
1290 asm_add (4, addreg1);
1292 /* Do that word. */
1293 output_asm_insn (singlemove_string (latehalf), latehalf);
1295 /* Undo the adds we just did. */
1296 if (addreg0)
1297 asm_add (4-size, addreg0);
1298 if (addreg1)
1299 asm_add (4-size, addreg1);
1301 return "";
1305 #define MAX_TMPS 2 /* max temporary registers used */
1307 /* Output the appropriate code to push a block of memory onto the stack */
1309 char *
1310 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1311 rtx operands[];
1312 rtx insn;
1313 int length;
1314 int tmp_start;
1315 int n_operands;
1318 struct {
1319 char *load;
1320 char *push;
1321 rtx xops[2];
1322 } tmp_info[MAX_TMPS];
1324 rtx src = operands[1];
1325 int max_tmps = 0;
1326 int offset = 0;
1327 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1328 int stack_offset = 0;
1329 int i, num_tmps;
1330 rtx xops[1];
1332 if (!offsettable_memref_p (src))
1333 fatal_insn ("Source is not offsettable", insn);
1335 if ((length & 3) != 0)
1336 fatal_insn ("Pushing non-word aligned size", insn);
1338 /* Figure out which temporary registers we have available */
1339 for (i = tmp_start; i < n_operands; i++)
1341 if (GET_CODE (operands[i]) == REG)
1343 if (reg_overlap_mentioned_p (operands[i], src))
1344 continue;
1346 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1347 if (max_tmps == MAX_TMPS)
1348 break;
1352 if (max_tmps == 0)
1353 for (offset = length - 4; offset >= 0; offset -= 4)
1355 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1356 output_asm_insn (AS1(push%L0,%0), xops);
1357 if (stack_p)
1358 stack_offset += 4;
1361 else
1362 for (offset = length - 4; offset >= 0; )
1364 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1366 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1367 tmp_info[num_tmps].push = AS1(push%L0,%1);
1368 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1369 offset -= 4;
1372 for (i = 0; i < num_tmps; i++)
1373 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1375 for (i = 0; i < num_tmps; i++)
1376 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
1378 if (stack_p)
1379 stack_offset += 4*num_tmps;
1382 return "";
1387 /* Output the appropriate code to move data between two memory locations */
1389 char *
1390 output_move_memory (operands, insn, length, tmp_start, n_operands)
1391 rtx operands[];
1392 rtx insn;
1393 int length;
1394 int tmp_start;
1395 int n_operands;
1397 struct {
1398 char *load;
1399 char *store;
1400 rtx xops[3];
1401 } tmp_info[MAX_TMPS];
1403 rtx dest = operands[0];
1404 rtx src = operands[1];
1405 rtx qi_tmp = NULL_RTX;
1406 int max_tmps = 0;
1407 int offset = 0;
1408 int i, num_tmps;
1409 rtx xops[3];
1411 if (GET_CODE (dest) == MEM
1412 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1413 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1414 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1416 if (!offsettable_memref_p (src))
1417 fatal_insn ("Source is not offsettable", insn);
1419 if (!offsettable_memref_p (dest))
1420 fatal_insn ("Destination is not offsettable", insn);
1422 /* Figure out which temporary registers we have available */
1423 for (i = tmp_start; i < n_operands; i++)
1425 if (GET_CODE (operands[i]) == REG)
1427 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1428 qi_tmp = operands[i];
1430 if (reg_overlap_mentioned_p (operands[i], dest))
1431 fatal_insn ("Temporary register overlaps the destination", insn);
1433 if (reg_overlap_mentioned_p (operands[i], src))
1434 fatal_insn ("Temporary register overlaps the source", insn);
1436 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1437 if (max_tmps == MAX_TMPS)
1438 break;
1442 if (max_tmps == 0)
1443 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1445 if ((length & 1) != 0)
1447 if (!qi_tmp)
1448 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
1451 while (length > 1)
1453 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1455 if (length >= 4)
1457 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1458 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1459 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1460 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1461 offset += 4;
1462 length -= 4;
1464 else if (length >= 2)
1466 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1467 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1468 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1469 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1470 offset += 2;
1471 length -= 2;
1473 else
1474 break;
1477 for (i = 0; i < num_tmps; i++)
1478 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1480 for (i = 0; i < num_tmps; i++)
1481 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
1484 if (length == 1)
1486 xops[0] = adj_offsettable_operand (dest, offset);
1487 xops[1] = adj_offsettable_operand (src, offset);
1488 xops[2] = qi_tmp;
1489 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1490 output_asm_insn (AS2(mov%B0,%2,%0), xops);
1493 return "";
1498 standard_80387_constant_p (x)
1499 rtx x;
1501 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1502 REAL_VALUE_TYPE d;
1503 jmp_buf handler;
1504 int is0, is1;
1506 if (setjmp (handler))
1507 return 0;
1509 set_float_handler (handler);
1510 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1511 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1512 is1 = REAL_VALUES_EQUAL (d, dconst1);
1513 set_float_handler (NULL_PTR);
1515 if (is0)
1516 return 1;
1518 if (is1)
1519 return 2;
1521 /* Note that on the 80387, other constants, such as pi,
1522 are much slower to load as standard constants
1523 than to load from doubles in memory! */
1524 #endif
1526 return 0;
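/* In other words: return 1 for +0.0 (loadable with fldz) and 2 for
   1.0 (fld1); everything else, including -0.0 and pi, reports 0 and
   gets loaded from memory, per the note above.  */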
1529 char *
1530 output_move_const_single (operands)
1531 rtx *operands;
1533 if (FP_REG_P (operands[0]))
1535 int conval = standard_80387_constant_p (operands[1]);
1537 if (conval == 1)
1538 return "fldz";
1540 if (conval == 2)
1541 return "fld1";
1543 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1545 REAL_VALUE_TYPE r; long l;
1547 if (GET_MODE (operands[1]) == XFmode)
1548 abort ();
1550 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1551 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1552 operands[1] = GEN_INT (l);
1554 return singlemove_string (operands);
1557 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1558 reference and a constant. */
1561 symbolic_operand (op, mode)
1562 register rtx op;
1563 enum machine_mode mode;
1565 switch (GET_CODE (op))
1567 case SYMBOL_REF:
1568 case LABEL_REF:
1569 return 1;
1570 case CONST:
1571 op = XEXP (op, 0);
1572 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1573 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1574 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1575 default:
1576 return 0;
1580 /* Test for a valid operand for a call instruction.
1581 Don't allow the arg pointer register or virtual regs
1582 since they may change into reg + const, which the patterns
1583 can't handle yet. */
1586 call_insn_operand (op, mode)
1587 rtx op;
1588 enum machine_mode mode;
1590 if (GET_CODE (op) == MEM
1591 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1592 /* This makes a difference for PIC. */
1593 && general_operand (XEXP (op, 0), Pmode))
1594 || (GET_CODE (XEXP (op, 0)) == REG
1595 && XEXP (op, 0) != arg_pointer_rtx
1596 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1597 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1598 return 1;
1599 return 0;
1602 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1603 even if pic. */
1606 expander_call_insn_operand (op, mode)
1607 rtx op;
1608 enum machine_mode mode;
1610 if (GET_CODE (op) == MEM
1611 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1612 || (GET_CODE (XEXP (op, 0)) == REG
1613 && XEXP (op, 0) != arg_pointer_rtx
1614 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1615 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1616 return 1;
1617 return 0;
1620 /* Return 1 if OP is a comparison operator that can use the condition code
1621 generated by an arithmetic operation. */
1624 arithmetic_comparison_operator (op, mode)
1625 register rtx op;
1626 enum machine_mode mode;
1628 enum rtx_code code;
1630 if (mode != VOIDmode && mode != GET_MODE (op))
1631 return 0;
1632 code = GET_CODE (op);
1633 if (GET_RTX_CLASS (code) != '<')
1634 return 0;
1636 return (code != GT && code != LE);
1639 /* Returns 1 if OP contains a symbol reference */
1642 symbolic_reference_mentioned_p (op)
1643 rtx op;
1645 register char *fmt;
1646 register int i;
1648 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1649 return 1;
1651 fmt = GET_RTX_FORMAT (GET_CODE (op));
1652 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1654 if (fmt[i] == 'E')
1656 register int j;
1658 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1659 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1660 return 1;
1662 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1663 return 1;
1666 return 0;
1669 /* Attempt to expand a binary operator. Make the expansion closer to the
1670 actual machine, than just general_operand, which will allow 3 separate
1671 memory references (one output, two input) in a single insn. Return
1672 whether the insn fails, or succeeds. */
1675 ix86_expand_binary_operator (code, mode, operands)
1676 enum rtx_code code;
1677 enum machine_mode mode;
1678 rtx operands[];
1680 rtx insn;
1681 int i;
1682 int modified;
1684 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1685 if (GET_RTX_CLASS (code) == 'c'
1686 && (rtx_equal_p (operands[0], operands[2])
1687 || immediate_operand (operands[1], mode)))
1689 rtx temp = operands[1];
1690 operands[1] = operands[2];
1691 operands[2] = temp;
1694 /* If optimizing, copy to regs to improve CSE */
1695 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1697 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1698 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1700 if (GET_CODE (operands[2]) == MEM)
1701 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1703 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1705 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1706 emit_move_insn (temp, operands[1]);
1707 operands[1] = temp;
1708 return TRUE;
1712 if (!ix86_binary_operator_ok (code, mode, operands))
1714 /* If not optimizing, try to make a valid insn (the optimizing case did
1715 this above to improve the chances of CSE). */
1717 if ((!TARGET_PSEUDO || !optimize)
1718 && ((reload_in_progress | reload_completed) == 0)
1719 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1721 modified = FALSE;
1722 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1724 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1725 modified = TRUE;
1728 if (GET_CODE (operands[2]) == MEM)
1730 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1731 modified = TRUE;
1734 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1736 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1737 emit_move_insn (temp, operands[1]);
1738 operands[1] = temp;
1739 return TRUE;
1742 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1743 return FALSE;
1745 else
1746 return FALSE;
1749 return TRUE;
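/* Example of the canonicalization above: for a commutative operator,
   (set (reg A) (plus (const_int 5) (reg A))) has its source operands
   swapped so the register comes first; for MINUS with a constant
   first operand, the constant is loaded into a fresh pseudo
   instead.  */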
1752 /* Return TRUE or FALSE depending on whether the binary operator meets the
1753 appropriate constraints. */
1756 ix86_binary_operator_ok (code, mode, operands)
1757 enum rtx_code code;
1758 enum machine_mode mode;
1759 rtx operands[3];
1761 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1762 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1765 /* Attempt to expand a unary operator. Make the expansion closer to the
1766 actual machine, than just general_operand, which will allow 2 separate
1767 memory references (one output, one input) in a single insn. Return
1768 whether the insn fails, or succeeds. */
1771 ix86_expand_unary_operator (code, mode, operands)
1772 enum rtx_code code;
1773 enum machine_mode mode;
1774 rtx operands[];
1776 rtx insn;
1778 /* If optimizing, copy to regs to improve CSE */
1779 if (TARGET_PSEUDO
1780 && optimize
1781 && ((reload_in_progress | reload_completed) == 0)
1782 && GET_CODE (operands[1]) == MEM)
1784 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1787 if (!ix86_unary_operator_ok (code, mode, operands))
1789 if ((!TARGET_PSEUDO || !optimize)
1790 && ((reload_in_progress | reload_completed) == 0)
1791 && GET_CODE (operands[1]) == MEM)
1793 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1794 if (!ix86_unary_operator_ok (code, mode, operands))
1795 return FALSE;
1797 else
1798 return FALSE;
1801 return TRUE;
1804 /* Return TRUE or FALSE depending on whether the unary operator meets the
1805 appropriate constraints. */
1808 ix86_unary_operator_ok (code, mode, operands)
1809 enum rtx_code code;
1810 enum machine_mode mode;
1811 rtx operands[2];
1813 return TRUE;
1818 static rtx pic_label_rtx;
1819 static char pic_label_name [256];
1820 static int pic_label_no = 0;
1822 /* This function generates code for -fpic that loads %ebx with
1823 the return address of the caller and then returns. */
1824 void
1825 asm_output_function_prefix (file, name)
1826 FILE * file;
1827 char * name;
1829 rtx xops[2];
1830 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1831 || current_function_uses_const_pool);
1832 xops[0] = pic_offset_table_rtx;
1833 xops[1] = stack_pointer_rtx;
1835 /* deep branch prediction favors having a return for every call */
1836 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1838 tree prologue_node;
1840 if (pic_label_rtx == 0)
1842 pic_label_rtx = (rtx) gen_label_rtx ();
1843 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1844 LABEL_NAME (pic_label_rtx) = pic_label_name;
1846 prologue_node = make_node (FUNCTION_DECL);
1847 DECL_RESULT (prologue_node) = 0;
1848 #ifdef ASM_DECLARE_FUNCTION_NAME
1849 ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
1850 #endif
1851 output_asm_insn ("movl (%1),%0", xops);
1852 output_asm_insn ("ret", xops);
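/* The net effect, roughly (illustrative sketch of the emitted
   assembly; the label name follows the LPR%d scheme above):

       LPR0:  movl (%esp),%ebx
              ret

   Each prologue then does `call LPR0' followed by
   `addl $_GLOBAL_OFFSET_TABLE_,%ebx' (see function_prologue below),
   so the return address lands in %ebx without unbalancing the
   call/return prediction stack.  */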
1856 /* Set up the stack and frame (if desired) for the function. */
1858 void
1859 function_prologue (file, size)
1860 FILE *file;
1861 int size;
1863 register int regno;
1864 int limit;
1865 rtx xops[4];
1866 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1867 || current_function_uses_const_pool);
1868 long tsize = get_frame_size ();
1869 int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;
1871 /* pic references don't explicitly mention pic_offset_table_rtx */
1872 if (TARGET_SCHEDULE_PROLOGUE)
1874 pic_label_rtx = 0;
1875 return;
1878 xops[0] = stack_pointer_rtx;
1879 xops[1] = frame_pointer_rtx;
1880 xops[2] = GEN_INT (tsize);
1882 if (frame_pointer_needed)
1884 output_asm_insn ("push%L1 %1", xops);
1885 if (dwarf2out_do_frame ())
1887 char *l = (char *) dwarf2out_cfi_label ();
1888 cfa_store_offset += 4;
1889 cfa_offset = cfa_store_offset;
1890 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
1891 dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, -cfa_store_offset);
1893 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
1894 if (dwarf2out_do_frame ())
1895 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
1898 if (tsize == 0)
1900 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1902 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
1903 if (dwarf2out_do_frame ())
1905 cfa_store_offset += tsize;
1906 if (! frame_pointer_needed)
1908 cfa_offset = cfa_store_offset;
1909 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
1913 else
1915 xops[3] = gen_rtx (REG, SImode, 0);
1916 output_asm_insn (AS2 (mov%L0,%2,%3), xops);
1918 xops[3] = gen_rtx (SYMBOL_REF, Pmode, "_alloca");
1919 output_asm_insn (AS1 (call,%P3), xops);
1922 /* Note: if you use `enter', the operands are NOT reversed;
1923 unlike other instructions, this one keeps the Intel operand order.
1924 `enter' is thought to be slower, and sdb doesn't like it.
1925 But if you want it, the code is:
1927 xops[3] = const0_rtx;
1928 output_asm_insn ("enter %2,%3", xops);
1931 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1932 for (regno = limit - 1; regno >= 0; regno--)
1933 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1934 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1936 xops[0] = gen_rtx (REG, SImode, regno);
1937 output_asm_insn ("push%L0 %0", xops);
1938 if (dwarf2out_do_frame ())
1940 char *l = (char *) dwarf2out_cfi_label ();
1941 cfa_store_offset += 4;
1942 if (! frame_pointer_needed)
1944 cfa_offset = cfa_store_offset;
1945 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
1947 dwarf2out_reg_save (l, regno, -cfa_store_offset);
1951 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1953 xops[0] = pic_offset_table_rtx;
1954 xops[1] = gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx));
1956 output_asm_insn (AS1 (call,%P1), xops);
1957 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
1958 pic_label_rtx = 0;
1960 else if (pic_reg_used)
1962 xops[0] = pic_offset_table_rtx;
1963 xops[1] = (rtx) gen_label_rtx ();
1965 output_asm_insn (AS1 (call,%P1), xops);
1966 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1967 output_asm_insn (AS1 (pop%L0,%0), xops);
1968 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1972 /* This function generates the assembly code for function entry.
1973 FILE is an stdio stream to output the code to.
1974 SIZE is an int: how many units of temporary storage to allocate. */
1976 void
1977 ix86_expand_prologue ()
1979 register int regno;
1980 int limit;
1981 rtx xops[4];
1982 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1983 || current_function_uses_const_pool);
1984 long tsize = get_frame_size ();
1985 rtx insn;
1987 if (!TARGET_SCHEDULE_PROLOGUE)
1988 return;
1990 xops[0] = stack_pointer_rtx;
1991 xops[1] = frame_pointer_rtx;
1992 xops[2] = GEN_INT (tsize);
1993 if (frame_pointer_needed)
1995 insn = emit_insn
1996 (gen_rtx (SET, 0,
1997 gen_rtx (MEM, SImode,
1998 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1999 frame_pointer_rtx));
2000 RTX_FRAME_RELATED_P (insn) = 1;
2001 insn = emit_move_insn (xops[1], xops[0]);
2002 RTX_FRAME_RELATED_P (insn) = 1;
2005 if (tsize == 0)
2007 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
2009 insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
2010 RTX_FRAME_RELATED_P (insn) = 1;
2012 else
2014 xops[3] = gen_rtx (REG, SImode, 0);
2015 emit_move_insn (xops[3], xops[2]);
2016 xops[3] = gen_rtx (MEM, FUNCTION_MODE,
2017 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
2018 emit_call_insn (gen_rtx (CALL, VOIDmode,
2019 xops[3], const0_rtx));
2022 /* Note: if you use `enter', the operands are NOT reversed;
2023 unlike other instructions, this one keeps the Intel operand order.
2024 `enter' is thought to be slower, and sdb doesn't like it.
2025 But if you want it, the code is:
2027 xops[3] = const0_rtx;
2028 output_asm_insn ("enter %2,%3", xops);
2031 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2032 for (regno = limit - 1; regno >= 0; regno--)
2033 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2034 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2036 xops[0] = gen_rtx (REG, SImode, regno);
2037 insn = emit_insn
2038 (gen_rtx (SET, 0,
2039 gen_rtx (MEM, SImode,
2040 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
2041 xops[0]));
2043 RTX_FRAME_RELATED_P (insn) = 1;
2046 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
2048 xops[0] = pic_offset_table_rtx;
2049 if (pic_label_rtx == 0)
2051 pic_label_rtx = (rtx) gen_label_rtx ();
2052 sprintf (pic_label_name, "LPR%d", pic_label_no++);
2053 LABEL_NAME (pic_label_rtx) = pic_label_name;
2055 xops[1] = gen_rtx (MEM, QImode, gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx)));
2057 emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
2058 emit_insn (gen_prologue_set_got (xops[0],
2059 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
2060 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
2062 else if (pic_reg_used)
2064 xops[0] = pic_offset_table_rtx;
2065 xops[1] = (rtx) gen_label_rtx ();
2067 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
2068 emit_insn (gen_pop (xops[0]));
2069 emit_insn (gen_prologue_set_got (xops[0],
2070 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
2071 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
2075 /* Restore function stack, frame, and registers. */
2077 void
2078 function_epilogue (file, size)
2079 FILE *file;
2080 int size;
2084 /* Return 1 if it is appropriate to emit `ret' instructions in the
2085 body of a function. Do this only if the epilogue is simple, needing a
2086 couple of insns. Prior to reloading, we can't tell how many registers
2087 must be saved, so return 0 then. Return 0 if there is no frame
2088 marker to de-allocate.
2090 If NON_SAVING_SETJMP is defined and true, then it is not possible
2091 for the epilogue to be simple, so return 0. This is a special case
2092 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2093 until final, but jump_optimize may need to know sooner if a
2094 `return' is OK. */
2097 ix86_can_use_return_insn_p ()
2099 int regno;
2100 int nregs = 0;
2101 int reglimit = (frame_pointer_needed
2102 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2103 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2104 || current_function_uses_const_pool);
2106 #ifdef NON_SAVING_SETJMP
2107 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2108 return 0;
2109 #endif
2111 if (! reload_completed)
2112 return 0;
2114 for (regno = reglimit - 1; regno >= 0; regno--)
2115 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2116 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2117 nregs++;
2119 return nregs == 0 || ! frame_pointer_needed;
2123 /* This function expands the epilogue of the current function into RTL:
2124 it restores the saved registers, releases the stack frame, and emits
2125 the return (popping any callee-popped arguments). */
2127 void
2128 ix86_expand_epilogue ()
2130 register int regno;
2131 register int nregs, limit;
2132 int offset;
2133 rtx xops[3];
2134 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2135 || current_function_uses_const_pool);
2136 long tsize = get_frame_size ();
2138 /* Compute the number of registers to pop */
2140 limit = (frame_pointer_needed
2141 ? FRAME_POINTER_REGNUM
2142 : STACK_POINTER_REGNUM);
2144 nregs = 0;
2146 for (regno = limit - 1; regno >= 0; regno--)
2147 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2148 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2149 nregs++;
2151 /* sp is often unreliable (e.g. after alloca), so when a frame pointer exists we compute the restore address from it rather than from sp. */
2154 /* In reality, we may not care if sp is unreliable, because we can
2155 restore the register relative to the frame pointer. In theory,
2156 since each move is the same speed as a pop, and we don't need the
2157 leal, this is faster. For now restore multiple registers the old
2158 way. */
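/* Illustrative sketch (assuming a frame pointer, tsize == 16, and
   %ebx/%esi saved): the pop-based path below emits roughly

	leal -24(%ebp),%esp
	popl %ebx
	popl %esi

   while with only a single saved register (say %ebx) the else-branch
   restores it with a frame-pointer-relative move such as

	movl -20(%ebp),%ebx

   Afterwards the frame-pointer case finishes with `leave' (or
   movl %ebp,%esp; popl %ebp when !TARGET_USE_LEAVE).  */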
2160 offset = -tsize - (nregs * UNITS_PER_WORD);
2162 xops[2] = stack_pointer_rtx;
2164 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2165 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2166 moved before any instruction which implicitly uses the got. This
2167 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2169 Alternatively, this could be fixed by making the dependence on the
2170 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2171 if (flag_pic)
2172 emit_insn (gen_blockage ());
2174 if (nregs > 1 || ! frame_pointer_needed)
2176 if (frame_pointer_needed)
2178 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2179 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2180 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2183 for (regno = 0; regno < limit; regno++)
2184 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2185 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2187 xops[0] = gen_rtx (REG, SImode, regno);
2188 emit_insn (gen_pop (xops[0]));
2189 /* output_asm_insn ("pop%L0 %0", xops);*/
2192 else
2193 for (regno = 0; regno < limit; regno++)
2194 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2195 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2197 xops[0] = gen_rtx (REG, SImode, regno);
2198 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2199 emit_move_insn (xops[0], xops[1]);
2200 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2201 offset += 4;
2204 if (frame_pointer_needed)
2206 /* If not an i386, mov & pop is faster than "leave". */
2208 if (TARGET_USE_LEAVE)
2209 emit_insn (gen_leave());
2210 /* output_asm_insn ("leave", xops);*/
2211 else
2213 xops[0] = frame_pointer_rtx;
2214 xops[1] = stack_pointer_rtx;
2215 emit_insn (gen_epilogue_set_stack_ptr());
2216 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2217 emit_insn (gen_pop (xops[0]));
2218 /* output_asm_insn ("pop%L0 %0", xops);*/
2221 else if (tsize)
2223 /* If there is no frame pointer, we must still release the frame. */
2225 xops[0] = GEN_INT (tsize);
2226 emit_insn (gen_rtx (SET, SImode,
2227 xops[2],
2228 gen_rtx (PLUS, SImode,
2229 xops[2],
2230 xops[0])));
2231 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2234 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2235 if (profile_block_flag == 2)
2237 FUNCTION_BLOCK_PROFILER_EXIT(file);
2239 #endif
2241 if (current_function_pops_args && current_function_args_size)
2243 xops[1] = GEN_INT (current_function_pops_args);
2245 /* The `ret' instruction takes an unsigned 16-bit byte count, but we
2246 conservatively use it only for counts below 32K. If asked to pop more,
2247 pop the return address, do an explicit add, and jump indirectly to the caller. */
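/* Illustrative sketch of the large-count path below, with N standing
   for current_function_pops_args and %ecx being hard register 2:

	popl %ecx		fetch the return address
	addl $N,%esp		release the arguments
	jmp *%ecx		return to the caller

   For smaller counts a single `ret $N' is used instead.  */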
2249 if (current_function_pops_args >= 32768)
2251 /* ??? Which register to use here? */
2252 xops[0] = gen_rtx (REG, SImode, 2);
2253 emit_insn (gen_pop (xops[0]));
2254 /* output_asm_insn ("pop%L0 %0", xops);*/
2255 emit_insn (gen_rtx (SET, SImode,
2256 xops[2],
2257 gen_rtx (PLUS, SImode,
2258 xops[1],
2259 xops[2])));
2260 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2261 emit_jump_insn (xops[0]);
2262 /* output_asm_insn ("jmp %*%0", xops);*/
2264 else
2265 emit_jump_insn (gen_return_pop_internal (xops[1]));
2266 /* output_asm_insn ("ret %1", xops);*/
2268 else
2269 /* output_asm_insn ("ret", xops);*/
2270 emit_jump_insn (gen_return_internal ());
2274 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2275 that is a valid memory address for an instruction.
2276 The MODE argument is the machine mode for the MEM expression
2277 that wants to use this address.
2279 On x86, legitimate addresses are:
2280 base movl (base),reg
2281 displacement movl disp,reg
2282 base + displacement movl disp(base),reg
2283 index + base movl (base,index),reg
2284 (index + base) + displacement movl disp(base,index),reg
2285 index*scale movl (,index,scale),reg
2286 index*scale + disp movl disp(,index,scale),reg
2287 index*scale + base movl (base,index,scale),reg
2288 (index*scale + base) + disp movl disp(base,index,scale),reg
2290 In each case, scale can be 1, 2, 4, 8. */
2292 /* This is exactly the same as print_operand_address, except that
2293 it recognizes addresses instead of printing them.
2295 It only recognizes addresses in canonical form. LEGITIMIZE_ADDRESS should
2296 convert common non-canonical forms to canonical form so that they will
2297 be recognized. */
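/* For example, the canonical address

	(plus (plus (mult (reg) (const_int 4)) (reg)) (const_int 8))

   is decomposed below into index, scale 4, base, and displacement 8 --
   the operand that prints as 8(base,index,4).  */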
2299 #define ADDR_INVALID(msg,insn) \
2300 do { \
2301 if (TARGET_DEBUG_ADDR) \
2303 fprintf (stderr, msg); \
2304 debug_rtx (insn); \
2306 } while (0)
2309 legitimate_address_p (mode, addr, strict)
2310 enum machine_mode mode;
2311 register rtx addr;
2312 int strict;
2314 rtx base = NULL_RTX;
2315 rtx indx = NULL_RTX;
2316 rtx scale = NULL_RTX;
2317 rtx disp = NULL_RTX;
2319 if (TARGET_DEBUG_ADDR)
2321 fprintf (stderr,
2322 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2323 GET_MODE_NAME (mode), strict);
2325 debug_rtx (addr);
2328 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2329 base = addr; /* base reg */
2331 else if (GET_CODE (addr) == PLUS)
2333 rtx op0 = XEXP (addr, 0);
2334 rtx op1 = XEXP (addr, 1);
2335 enum rtx_code code0 = GET_CODE (op0);
2336 enum rtx_code code1 = GET_CODE (op1);
2338 if (code0 == REG || code0 == SUBREG)
2340 if (code1 == REG || code1 == SUBREG)
2342 indx = op0; /* index + base */
2343 base = op1;
2346 else
2348 base = op0; /* base + displacement */
2349 disp = op1;
2353 else if (code0 == MULT)
2355 indx = XEXP (op0, 0);
2356 scale = XEXP (op0, 1);
2358 if (code1 == REG || code1 == SUBREG)
2359 base = op1; /* index*scale + base */
2361 else
2362 disp = op1; /* index*scale + disp */
2365 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2367 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2368 scale = XEXP (XEXP (op0, 0), 1);
2369 base = XEXP (op0, 1);
2370 disp = op1;
2373 else if (code0 == PLUS)
2375 indx = XEXP (op0, 0); /* index + base + disp */
2376 base = XEXP (op0, 1);
2377 disp = op1;
2380 else
2382 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2383 return FALSE;
2387 else if (GET_CODE (addr) == MULT)
2389 indx = XEXP (addr, 0); /* index*scale */
2390 scale = XEXP (addr, 1);
2393 else
2394 disp = addr; /* displacement */
2396 /* Allow the arg pointer and stack pointer as an index if there is no scaling. */
2397 if (base && indx && !scale
2398 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2400 rtx tmp = base;
2401 base = indx;
2402 indx = tmp;
2405 /* Validate base register */
2406 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2407 is one word out of a two word structure, which is represented internally
2408 as a DImode int. */
2409 if (base)
2411 if (GET_CODE (base) != REG)
2413 ADDR_INVALID ("Base is not a register.\n", base);
2414 return FALSE;
2417 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2418 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2420 ADDR_INVALID ("Base is not valid.\n", base);
2421 return FALSE;
2425 /* Validate index register */
2426 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2427 is one word out of a two word structure, which is represented internally
2428 as a DImode int. */
2429 if (indx)
2431 if (GET_CODE (indx) != REG)
2433 ADDR_INVALID ("Index is not a register.\n", indx);
2434 return FALSE;
2437 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2438 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2440 ADDR_INVALID ("Index is not valid.\n", indx);
2441 return FALSE;
2444 else if (scale)
2445 abort (); /* scale w/o index invalid */
2447 /* Validate scale factor */
2448 if (scale)
2450 HOST_WIDE_INT value;
2452 if (GET_CODE (scale) != CONST_INT)
2454 ADDR_INVALID ("Scale is not valid.\n", scale);
2455 return FALSE;
2458 value = INTVAL (scale);
2459 if (value != 1 && value != 2 && value != 4 && value != 8)
2461 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2462 return FALSE;
2466 /* Validate displacement
2467 Constant pool addresses must be handled specially. They are
2468 considered legitimate addresses, but only if not used with regs.
2469 When printed, the output routines know to print the reference with the
2470 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2471 if (disp)
2473 if (GET_CODE (disp) == SYMBOL_REF
2474 && CONSTANT_POOL_ADDRESS_P (disp)
2475 && !base
2476 && !indx)
2479 else if (!CONSTANT_ADDRESS_P (disp))
2481 ADDR_INVALID ("Displacement is not valid.\n", disp);
2482 return FALSE;
2485 else if (GET_CODE (disp) == CONST_DOUBLE)
2487 ADDR_INVALID ("Displacement is a const_double.\n", disp);
2488 return FALSE;
2491 else if (flag_pic && SYMBOLIC_CONST (disp)
2492 && base != pic_offset_table_rtx
2493 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2495 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2496 return FALSE;
2499 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2500 && (base != NULL_RTX || indx != NULL_RTX))
2502 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2503 return FALSE;
2507 if (TARGET_DEBUG_ADDR)
2508 fprintf (stderr, "Address is valid.\n");
2510 /* Everything looks valid, return true */
2511 return TRUE;
2515 /* Return a legitimate reference for ORIG (an address) using the
2516 register REG. If REG is 0, a new pseudo is generated.
2518 There are three types of references that must be handled:
2520 1. Global data references must load the address from the GOT, via
2521 the PIC reg. An insn is emitted to do this load, and the reg is
2522 returned.
2524 2. Static data references must compute the address as an offset
2525 from the GOT, whose base is in the PIC reg. An insn is emitted to
2526 compute the address into a reg, and the reg is returned. Static
2527 data objects have SYMBOL_REF_FLAG set to differentiate them from
2528 global data objects.
2530 3. Constant pool addresses must be handled specially. They are
2531 considered legitimate addresses, but only if not used with regs.
2532 When printed, the output routines know to print the reference with the
2533 PIC reg, even though the PIC reg doesn't appear in the RTL.
2535 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2536 reg also appears in the address (except for constant pool references,
2537 noted above).
2539 "switch" statements also require special handling when generating
2540 PIC code. See comments by the `casesi' insn in i386.md for details. */
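/* As an illustrative sketch (with hypothetical symbols foo, bar and
   constant pool label .LC0, and %ebx as the PIC register), the three
   cases end up printed roughly as:

	movl foo@GOT(%ebx),%reg		1. global data: address loaded from the GOT
	leal bar@GOTOFF(%ebx),%reg	2. static data: GOT-relative address
	.LC0@GOTOFF(%ebx)		3. constant pool reference used in place

   The @GOT / @GOTOFF decoration is added by output_pic_addr_const.  */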
2543 legitimize_pic_address (orig, reg)
2544 rtx orig;
2545 rtx reg;
2547 rtx addr = orig;
2548 rtx new = orig;
2550 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
2552 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2553 reg = new = orig;
2554 else
2556 if (reg == 0)
2557 reg = gen_reg_rtx (Pmode);
2559 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2560 || GET_CODE (addr) == LABEL_REF)
2561 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2562 else
2563 new = gen_rtx (MEM, Pmode,
2564 gen_rtx (PLUS, Pmode,
2565 pic_offset_table_rtx, orig));
2567 emit_move_insn (reg, new);
2569 current_function_uses_pic_offset_table = 1;
2570 return reg;
2572 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2574 rtx base;
2576 if (GET_CODE (addr) == CONST)
2578 addr = XEXP (addr, 0);
2579 if (GET_CODE (addr) != PLUS)
2580 abort ();
2583 if (XEXP (addr, 0) == pic_offset_table_rtx)
2584 return orig;
2586 if (reg == 0)
2587 reg = gen_reg_rtx (Pmode);
2589 base = legitimize_pic_address (XEXP (addr, 0), reg);
2590 addr = legitimize_pic_address (XEXP (addr, 1),
2591 base == reg ? NULL_RTX : reg);
2593 if (GET_CODE (addr) == CONST_INT)
2594 return plus_constant (base, INTVAL (addr));
2596 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2598 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2599 addr = XEXP (addr, 1);
2601 return gen_rtx (PLUS, Pmode, base, addr);
2603 return new;
2607 /* Emit insns to move operands[1] into operands[0]. */
2609 void
2610 emit_pic_move (operands, mode)
2611 rtx *operands;
2612 enum machine_mode mode;
2614 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2616 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2617 operands[1] = (rtx) force_reg (SImode, operands[1]);
2618 else
2619 operands[1] = legitimize_pic_address (operands[1], temp);
2623 /* Try machine-dependent ways of modifying an illegitimate address
2624 to be legitimate. If we find one, return the new, valid address.
2625 This macro is used in only one place: `memory_address' in explow.c.
2627 OLDX is the address as it was before break_out_memory_refs was called.
2628 In some cases it is useful to look at this to decide what needs to be done.
2630 MODE and WIN are passed so that this macro can use
2631 GO_IF_LEGITIMATE_ADDRESS.
2633 It is always safe for this macro to do nothing. It exists to recognize
2634 opportunities to optimize the output.
2636 For the 80386, we handle X+REG by loading X into a register R and
2637 using R+REG. R will go in a general reg and indexing will be used.
2638 However, if REG is a broken-out memory address or multiplication,
2639 nothing needs to be done because REG can certainly go in a general reg.
2641 When -fpic is used, special handling is needed for symbolic references.
2642 See comments by legitimize_pic_address in i386.c for details. */
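/* For example, the sum of three registers

	(plus (plus (reg) (reg)) (reg))

   is not a legitimate address.  The code at the end of this function
   forces the inner PLUS into a fresh register R with an add insn and
   returns (plus R (reg)), a valid base+index address.  */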
2645 legitimize_address (x, oldx, mode)
2646 register rtx x;
2647 register rtx oldx;
2648 enum machine_mode mode;
2650 int changed = 0;
2651 unsigned log;
2653 if (TARGET_DEBUG_ADDR)
2655 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
2656 debug_rtx (x);
2659 if (flag_pic && SYMBOLIC_CONST (x))
2660 return legitimize_pic_address (x, 0);
2662 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2663 if (GET_CODE (x) == ASHIFT
2664 && GET_CODE (XEXP (x, 1)) == CONST_INT
2665 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2667 changed = 1;
2668 x = gen_rtx (MULT, Pmode,
2669 force_reg (Pmode, XEXP (x, 0)),
2670 GEN_INT (1 << log));
2673 if (GET_CODE (x) == PLUS)
2675 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2676 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2677 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2678 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2680 changed = 1;
2681 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2682 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2683 GEN_INT (1 << log));
2686 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2687 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2688 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2690 changed = 1;
2691 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2692 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2693 GEN_INT (1 << log));
2696 /* Put multiply first if it isn't already */
2697 if (GET_CODE (XEXP (x, 1)) == MULT)
2699 rtx tmp = XEXP (x, 0);
2700 XEXP (x, 0) = XEXP (x, 1);
2701 XEXP (x, 1) = tmp;
2702 changed = 1;
2705 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2706 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2707 created by virtual register instantiation, register elimination, and
2708 similar optimizations. */
2709 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2711 changed = 1;
2712 x = gen_rtx (PLUS, Pmode,
2713 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2714 XEXP (XEXP (x, 1), 1));
2717 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2718 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2719 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2720 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2721 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2722 && CONSTANT_P (XEXP (x, 1)))
2724 rtx constant, other;
2726 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2728 constant = XEXP (x, 1);
2729 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2731 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2733 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2734 other = XEXP (x, 1);
2736 else
2737 constant = 0;
2739 if (constant)
2741 changed = 1;
2742 x = gen_rtx (PLUS, Pmode,
2743 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2744 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2745 plus_constant (other, INTVAL (constant)));
2749 if (changed && legitimate_address_p (mode, x, FALSE))
2750 return x;
2752 if (GET_CODE (XEXP (x, 0)) == MULT)
2754 changed = 1;
2755 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2758 if (GET_CODE (XEXP (x, 1)) == MULT)
2760 changed = 1;
2761 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2764 if (changed
2765 && GET_CODE (XEXP (x, 1)) == REG
2766 && GET_CODE (XEXP (x, 0)) == REG)
2767 return x;
2769 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2771 changed = 1;
2772 x = legitimize_pic_address (x, 0);
2775 if (changed && legitimate_address_p (mode, x, FALSE))
2776 return x;
2778 if (GET_CODE (XEXP (x, 0)) == REG)
2780 register rtx temp = gen_reg_rtx (Pmode);
2781 register rtx val = force_operand (XEXP (x, 1), temp);
2782 if (val != temp)
2783 emit_move_insn (temp, val);
2785 XEXP (x, 1) = temp;
2786 return x;
2789 else if (GET_CODE (XEXP (x, 1)) == REG)
2791 register rtx temp = gen_reg_rtx (Pmode);
2792 register rtx val = force_operand (XEXP (x, 0), temp);
2793 if (val != temp)
2794 emit_move_insn (temp, val);
2796 XEXP (x, 0) = temp;
2797 return x;
2801 return x;
2805 /* Print an integer constant expression in assembler syntax. Addition
2806 and subtraction are the only arithmetic that may appear in these
2807 expressions. FILE is the stdio stream to write to, X is the rtx, and
2808 CODE is the operand print code from the output string. */
2810 static void
2811 output_pic_addr_const (file, x, code)
2812 FILE *file;
2813 rtx x;
2814 int code;
2816 char buf[256];
2818 switch (GET_CODE (x))
2820 case PC:
2821 if (flag_pic)
2822 putc ('.', file);
2823 else
2824 abort ();
2825 break;
2827 case SYMBOL_REF:
2828 case LABEL_REF:
2829 if (GET_CODE (x) == SYMBOL_REF)
2830 assemble_name (file, XSTR (x, 0));
2831 else
2833 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2834 CODE_LABEL_NUMBER (XEXP (x, 0)));
2835 assemble_name (asm_out_file, buf);
2838 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2839 fprintf (file, "@GOTOFF(%%ebx)");
2840 else if (code == 'P')
2841 fprintf (file, "@PLT");
2842 else if (GET_CODE (x) == LABEL_REF)
2843 fprintf (file, "@GOTOFF");
2844 else if (! SYMBOL_REF_FLAG (x))
2845 fprintf (file, "@GOT");
2846 else
2847 fprintf (file, "@GOTOFF");
2849 break;
2851 case CODE_LABEL:
2852 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2853 assemble_name (asm_out_file, buf);
2854 break;
2856 case CONST_INT:
2857 fprintf (file, "%d", INTVAL (x));
2858 break;
2860 case CONST:
2861 /* This used to output parentheses around the expression,
2862 but that does not work on the 386 (either ATT or BSD assembler). */
2863 output_pic_addr_const (file, XEXP (x, 0), code);
2864 break;
2866 case CONST_DOUBLE:
2867 if (GET_MODE (x) == VOIDmode)
2869 /* We can use %d if the number is <32 bits and positive. */
2870 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2871 fprintf (file, "0x%x%08x",
2872 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2873 else
2874 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2876 else
2877 /* We can't handle floating point constants;
2878 PRINT_OPERAND must handle them. */
2879 output_operand_lossage ("floating constant misused");
2880 break;
2882 case PLUS:
2883 /* Some assemblers need integer constants to appear last (eg masm). */
2884 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2886 output_pic_addr_const (file, XEXP (x, 1), code);
2887 if (INTVAL (XEXP (x, 0)) >= 0)
2888 fprintf (file, "+");
2889 output_pic_addr_const (file, XEXP (x, 0), code);
2891 else
2893 output_pic_addr_const (file, XEXP (x, 0), code);
2894 if (INTVAL (XEXP (x, 1)) >= 0)
2895 fprintf (file, "+");
2896 output_pic_addr_const (file, XEXP (x, 1), code);
2898 break;
2900 case MINUS:
2901 output_pic_addr_const (file, XEXP (x, 0), code);
2902 fprintf (file, "-");
2903 output_pic_addr_const (file, XEXP (x, 1), code);
2904 break;
2906 default:
2907 output_operand_lossage ("invalid expression as operand");
2911 /* Append the correct conditional move suffix which corresponds to CODE */
2913 static void
2914 put_condition_code (code, reverse_cc, mode, file)
2915 enum rtx_code code;
2916 int reverse_cc;
2917 enum mode_class mode;
2918 FILE * file;
2920 int ieee;
2921 ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
2922 && ! (cc_prev_status.flags & CC_FCOMI));
2923 if (reverse_cc && ! ieee)
2924 code = reverse_condition (code);
2926 if (mode == MODE_INT)
2927 switch (code)
2929 case NE:
2930 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2931 fputs ("b", file);
2932 else
2933 fputs ("ne", file);
2934 return;
2935 case EQ:
2936 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2937 fputs ("ae", file);
2938 else
2939 fputs ("e", file);
2940 return;
2941 case GE:
2942 fputs ("ge", file); return;
2943 case GT:
2944 fputs ("g", file); return;
2945 case LE:
2946 fputs ("le", file); return;
2947 case LT:
2948 fputs ("l", file); return;
2949 case GEU:
2950 fputs ("ae", file); return;
2951 case GTU:
2952 fputs ("a", file); return;
2953 case LEU:
2954 fputs ("be", file); return;
2955 case LTU:
2956 fputs ("b", file); return;
2957 default: output_operand_lossage ("Invalid %%C operand");
2959 else if (mode == MODE_FLOAT)
2960 switch (code)
2962 case NE:
2963 fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file); return;
2964 case EQ:
2965 fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file); return;
2966 case GE:
2967 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file); return;
2968 case GT:
2969 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file); return;
2970 case LE:
2971 fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file); return;
2972 case LT:
2973 fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file); return;
2974 case GEU:
2975 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file); return;
2976 case GTU:
2977 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file); return;
2978 case LEU:
2979 fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file); return;
2980 case LTU:
2981 fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file); return;
2982 default: output_operand_lossage ("Invalid %%C operand");
2986 /* Meaning of CODE:
2987 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2988 C -- print opcode suffix for set/cmov insn.
2989 c -- like C, but print reversed condition
2990 F -- print opcode suffix for fcmov insn.
2991 f -- like F, but print reversed condition
2992 R -- print the prefix for register names.
2993 z -- print the opcode suffix for the size of the current operand.
2994 * -- print a star (in certain assembler syntax)
2995 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2996 c -- don't print special prefixes before constant operands.
2997 J -- print the appropriate jump operand.
2998 s -- print a shift double count, followed by the assembler's argument
2999 delimiter.
3000 b -- print the QImode name of the register for the indicated operand.
3001 %b0 would print %al if operands[0] is reg 0.
3002 w -- likewise, print the HImode name of the register.
3003 k -- likewise, print the SImode name of the register.
3004 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3005 y -- print "st(0)" instead of "st" as a register.
3006 P -- print as a PIC constant
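/* For example (assuming AT&T-style output and hard register 0 for
   operand 0), a hypothetical template "mov%z1 %1,%k0" with an SImode
   memory operand 1 prints as "movl <mem>,%eax", while %b0, %w0 and %h0
   would print "%al", "%ax" and "%ah" respectively.  */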
3009 void
3010 print_operand (file, x, code)
3011 FILE *file;
3012 rtx x;
3013 int code;
3015 if (code)
3017 switch (code)
3019 case '*':
3020 if (USE_STAR)
3021 putc ('*', file);
3022 return;
3024 case 'L':
3025 PUT_OP_SIZE (code, 'l', file);
3026 return;
3028 case 'W':
3029 PUT_OP_SIZE (code, 'w', file);
3030 return;
3032 case 'B':
3033 PUT_OP_SIZE (code, 'b', file);
3034 return;
3036 case 'Q':
3037 PUT_OP_SIZE (code, 'l', file);
3038 return;
3040 case 'S':
3041 PUT_OP_SIZE (code, 's', file);
3042 return;
3044 case 'T':
3045 PUT_OP_SIZE (code, 't', file);
3046 return;
3048 case 'z':
3049 /* 387 opcodes don't get size suffixes if the operands are
3050 registers. */
3052 if (STACK_REG_P (x))
3053 return;
3055 /* Deduce the opcode size suffix from the mode of the operand. */
3056 switch (GET_MODE_SIZE (GET_MODE (x)))
3058 case 1:
3059 PUT_OP_SIZE ('B', 'b', file);
3060 return;
3062 case 2:
3063 PUT_OP_SIZE ('W', 'w', file);
3064 return;
3066 case 4:
3067 if (GET_MODE (x) == SFmode)
3069 PUT_OP_SIZE ('S', 's', file);
3070 return;
3072 else
3073 PUT_OP_SIZE ('L', 'l', file);
3074 return;
3076 case 12:
3077 PUT_OP_SIZE ('T', 't', file);
3078 return;
3080 case 8:
3081 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
3083 #ifdef GAS_MNEMONICS
3084 PUT_OP_SIZE ('Q', 'q', file);
3085 return;
3086 #else
3087 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
3088 #endif
3091 PUT_OP_SIZE ('Q', 'l', file);
3092 return;
3095 case 'b':
3096 case 'w':
3097 case 'k':
3098 case 'h':
3099 case 'y':
3100 case 'P':
3101 break;
3103 case 'J':
3104 switch (GET_CODE (x))
3106 /* These conditions are appropriate for testing the result
3107 of an arithmetic operation, not for a compare operation.
3108 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3109 CC_Z_IN_NOT_C false and not floating point. */
3110 case NE: fputs ("jne", file); return;
3111 case EQ: fputs ("je", file); return;
3112 case GE: fputs ("jns", file); return;
3113 case LT: fputs ("js", file); return;
3114 case GEU: fputs ("jmp", file); return;
3115 case GTU: fputs ("jne", file); return;
3116 case LEU: fputs ("je", file); return;
3117 case LTU: fputs ("#branch never", file); return;
3119 /* no matching branches for GT nor LE */
3121 abort ();
3123 case 's':
3124 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
3126 PRINT_OPERAND (file, x, 0);
3127 fputs (AS2C (,) + 1, file);
3129 return;
3131 /* This is used by the conditional move instructions. */
3132 case 'C':
3133 put_condition_code (GET_CODE (x), 0, MODE_INT, file);
3134 return;
3136 /* like above, but reverse condition */
3137 case 'c':
3138 put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;
3140 case 'F':
3141 put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
3142 return;
3144 /* like above, but reverse condition */
3145 case 'f':
3146 put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
3147 return;
3149 default:
3151 char str[50];
3153 sprintf (str, "invalid operand code `%c'", code);
3154 output_operand_lossage (str);
3158 if (GET_CODE (x) == REG)
3160 PRINT_REG (x, code, file);
3162 else if (GET_CODE (x) == MEM)
3164 PRINT_PTR (x, file);
3165 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
3167 if (flag_pic)
3168 output_pic_addr_const (file, XEXP (x, 0), code);
3169 else
3170 output_addr_const (file, XEXP (x, 0));
3172 else
3173 output_address (XEXP (x, 0));
3175 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
3177 REAL_VALUE_TYPE r; long l;
3178 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3179 REAL_VALUE_TO_TARGET_SINGLE (r, l);
3180 PRINT_IMMED_PREFIX (file);
3181 fprintf (file, "0x%x", l);
3183 /* These float cases don't actually occur as immediate operands. */
3184 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
3186 REAL_VALUE_TYPE r; char dstr[30];
3187 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3188 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3189 fprintf (file, "%s", dstr);
3191 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
3193 REAL_VALUE_TYPE r; char dstr[30];
3194 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3195 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3196 fprintf (file, "%s", dstr);
3198 else
3200 if (code != 'P')
3202 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3203 PRINT_IMMED_PREFIX (file);
3204 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
3205 || GET_CODE (x) == LABEL_REF)
3206 PRINT_OFFSET_PREFIX (file);
3208 if (flag_pic)
3209 output_pic_addr_const (file, x, code);
3210 else
3211 output_addr_const (file, x);
3215 /* Print a memory operand whose address is ADDR. */
3217 void
3218 print_operand_address (file, addr)
3219 FILE *file;
3220 register rtx addr;
3222 register rtx reg1, reg2, breg, ireg;
3223 rtx offset;
3225 switch (GET_CODE (addr))
3227 case REG:
3228 ADDR_BEG (file);
3229 fprintf (file, "%se", RP);
3230 fputs (hi_reg_name[REGNO (addr)], file);
3231 ADDR_END (file);
3232 break;
3234 case PLUS:
3235 reg1 = 0;
3236 reg2 = 0;
3237 ireg = 0;
3238 breg = 0;
3239 offset = 0;
3240 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3242 offset = XEXP (addr, 0);
3243 addr = XEXP (addr, 1);
3245 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3247 offset = XEXP (addr, 1);
3248 addr = XEXP (addr, 0);
3250 if (GET_CODE (addr) != PLUS) ;
3251 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3253 reg1 = XEXP (addr, 0);
3254 addr = XEXP (addr, 1);
3256 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3258 reg1 = XEXP (addr, 1);
3259 addr = XEXP (addr, 0);
3261 else if (GET_CODE (XEXP (addr, 0)) == REG)
3263 reg1 = XEXP (addr, 0);
3264 addr = XEXP (addr, 1);
3266 else if (GET_CODE (XEXP (addr, 1)) == REG)
3268 reg1 = XEXP (addr, 1);
3269 addr = XEXP (addr, 0);
3271 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3273 if (reg1 == 0) reg1 = addr;
3274 else reg2 = addr;
3275 addr = 0;
3277 if (offset != 0)
3279 if (addr != 0) abort ();
3280 addr = offset;
3282 if ((reg1 && GET_CODE (reg1) == MULT)
3283 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3285 breg = reg2;
3286 ireg = reg1;
3288 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3290 breg = reg1;
3291 ireg = reg2;
3294 if (ireg != 0 || breg != 0)
3296 int scale = 1;
3298 if (addr != 0)
3300 if (flag_pic)
3301 output_pic_addr_const (file, addr, 0);
3303 else if (GET_CODE (addr) == LABEL_REF)
3304 output_asm_label (addr);
3306 else
3307 output_addr_const (file, addr);
3310 if (ireg != 0 && GET_CODE (ireg) == MULT)
3312 scale = INTVAL (XEXP (ireg, 1));
3313 ireg = XEXP (ireg, 0);
3316 /* The stack pointer can only appear as a base register,
3317 never an index register, so exchange the regs if it is wrong. */
3319 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3321 rtx tmp;
3323 tmp = breg;
3324 breg = ireg;
3325 ireg = tmp;
3328 /* output breg+ireg*scale */
3329 PRINT_B_I_S (breg, ireg, scale, file);
3330 break;
3333 case MULT:
3335 int scale;
3336 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3338 scale = INTVAL (XEXP (addr, 0));
3339 ireg = XEXP (addr, 1);
3341 else
3343 scale = INTVAL (XEXP (addr, 1));
3344 ireg = XEXP (addr, 0);
3346 output_addr_const (file, const0_rtx);
3347 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
3349 break;
3351 default:
3352 if (GET_CODE (addr) == CONST_INT
3353 && INTVAL (addr) < 0x8000
3354 && INTVAL (addr) >= -0x8000)
3355 fprintf (file, "%d", INTVAL (addr));
3356 else
3358 if (flag_pic)
3359 output_pic_addr_const (file, addr, 0);
3360 else
3361 output_addr_const (file, addr);
3366 /* Set the cc_status for the results of an insn whose pattern is EXP.
3367 On the 80386, we assume that only test and compare insns, as well
3368 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3369 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3370 Also, we assume that jumps, moves and sCOND don't affect the condition
3371 codes. All else clobbers the condition codes, by assumption.
3373 We assume that ALL integer add, minus, etc. instructions affect the
3374 condition codes. This MUST be consistent with i386.md.
3376 We don't record any float test or compare - the redundant test &
3377 compare check in final.c does not handle stack-like regs correctly. */
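/* For example, after (set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))
   the code below records the PLUS and the destination register in
   cc_status.value1/value2 with CC_NO_OVERFLOW set, so that a following
   (set (cc0) (compare (reg:SI 0) (const_int 0))) can be removed by the
   redundant test and compare check in final.c mentioned above.  */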
3379 void
3380 notice_update_cc (exp)
3381 rtx exp;
3383 if (GET_CODE (exp) == SET)
3385 /* Jumps do not alter the cc's. */
3386 if (SET_DEST (exp) == pc_rtx)
3387 return;
3389 /* Moving register or memory into a register:
3390 it doesn't alter the cc's, but it might invalidate
3391 the RTX's which we remember the cc's came from.
3392 (Note that moving a constant 0 or 1 MAY set the cc's). */
3393 if (REG_P (SET_DEST (exp))
3394 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3395 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3397 if (cc_status.value1
3398 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3399 cc_status.value1 = 0;
3400 if (cc_status.value2
3401 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3402 cc_status.value2 = 0;
3403 return;
3405 /* Moving register into memory doesn't alter the cc's.
3406 It may invalidate the RTX's which we remember the cc's came from. */
3407 if (GET_CODE (SET_DEST (exp)) == MEM
3408 && (REG_P (SET_SRC (exp))
3409 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3411 if ((cc_status.value1 && GET_CODE (cc_status.value1) == MEM)
3412 || reg_mentioned_p (SET_DEST (exp), cc_status.value1))
3413 cc_status.value1 = 0;
3414 if ((cc_status.value2 && GET_CODE (cc_status.value2) == MEM)
3415 || reg_mentioned_p (SET_DEST (exp), cc_status.value2))
3416 cc_status.value2 = 0;
3417 return;
3419 /* Function calls clobber the cc's. */
3420 else if (GET_CODE (SET_SRC (exp)) == CALL)
3422 CC_STATUS_INIT;
3423 return;
3425 /* Tests and compares set the cc's in predictable ways. */
3426 else if (SET_DEST (exp) == cc0_rtx)
3428 CC_STATUS_INIT;
3429 cc_status.value1 = SET_SRC (exp);
3430 return;
3432 /* Certain instructions affect the condition codes. */
3433 else if (GET_MODE (SET_SRC (exp)) == SImode
3434 || GET_MODE (SET_SRC (exp)) == HImode
3435 || GET_MODE (SET_SRC (exp)) == QImode)
3436 switch (GET_CODE (SET_SRC (exp)))
3438 case ASHIFTRT: case LSHIFTRT:
3439 case ASHIFT:
3440 /* Shifts on the 386 don't set the condition codes if the
3441 shift count is zero. */
3442 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3444 CC_STATUS_INIT;
3445 break;
3447 /* We assume that the CONST_INT is non-zero (this rtx would
3448 have been deleted if it were zero). */
3450 case PLUS: case MINUS: case NEG:
3451 case AND: case IOR: case XOR:
3452 cc_status.flags = CC_NO_OVERFLOW;
3453 cc_status.value1 = SET_SRC (exp);
3454 cc_status.value2 = SET_DEST (exp);
3455 break;
3457 default:
3458 CC_STATUS_INIT;
3460 else
3462 CC_STATUS_INIT;
3465 else if (GET_CODE (exp) == PARALLEL
3466 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3468 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3469 return;
3470 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3472 CC_STATUS_INIT;
3473 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3475 cc_status.flags |= CC_IN_80387;
3476 if (TARGET_CMOVE && stack_regs_mentioned_p
3477 (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
3478 cc_status.flags |= CC_FCOMI;
3480 else
3481 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3482 return;
3484 CC_STATUS_INIT;
3486 else
3488 CC_STATUS_INIT;
3492 /* Split one or more DImode RTL references into pairs of SImode
3493 references. The RTL can be REG, offsettable MEM, integer constant, or
3494 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3495 split and "num" is its length. lo_half and hi_half are output arrays
3496 that parallel "operands". */
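/* For example, a DImode value in hard register N splits into SImode
   registers N (low word) and N + 1 (high word); an offsettable DImode
   MEM splits into the original MEM and the same address plus 4;
   constants are handled by split_double.  */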
3498 void
3499 split_di (operands, num, lo_half, hi_half)
3500 rtx operands[];
3501 int num;
3502 rtx lo_half[], hi_half[];
3504 while (num--)
3506 if (GET_CODE (operands[num]) == REG)
3508 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3509 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3511 else if (CONSTANT_P (operands[num]))
3513 split_double (operands[num], &lo_half[num], &hi_half[num]);
3515 else if (offsettable_memref_p (operands[num]))
3517 lo_half[num] = operands[num];
3518 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3520 else
3521 abort();
3525 /* Return 1 if this is a valid binary operation on a 387.
3526 OP is the expression matched, and MODE is its mode. */
3529 binary_387_op (op, mode)
3530 register rtx op;
3531 enum machine_mode mode;
3533 if (mode != VOIDmode && mode != GET_MODE (op))
3534 return 0;
3536 switch (GET_CODE (op))
3538 case PLUS:
3539 case MINUS:
3540 case MULT:
3541 case DIV:
3542 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3544 default:
3545 return 0;
3550 /* Return 1 if this is a valid shift or rotate operation on a 386.
3551 OP is the expression matched, and MODE is its mode. */
3554 shift_op (op, mode)
3555 register rtx op;
3556 enum machine_mode mode;
3558 rtx operand = XEXP (op, 0);
3560 if (mode != VOIDmode && mode != GET_MODE (op))
3561 return 0;
3563 if (GET_MODE (operand) != GET_MODE (op)
3564 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3565 return 0;
3567 return (GET_CODE (op) == ASHIFT
3568 || GET_CODE (op) == ASHIFTRT
3569 || GET_CODE (op) == LSHIFTRT
3570 || GET_CODE (op) == ROTATE
3571 || GET_CODE (op) == ROTATERT);
3574 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3575 MODE is not used. */
3578 VOIDmode_compare_op (op, mode)
3579 register rtx op;
3580 enum machine_mode mode;
3582 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3585 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3586 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3587 is the expression of the binary operation. The output may either be
3588 emitted here, or returned to the caller, like all output_* functions.
3590 There is no guarantee that the operands are the same mode, as they
3591 might be within FLOAT or FLOAT_EXTEND expressions. */
3593 char *
3594 output_387_binary_op (insn, operands)
3595 rtx insn;
3596 rtx *operands;
3598 rtx temp;
3599 char *base_op;
3600 static char buf[100];
3602 switch (GET_CODE (operands[3]))
3604 case PLUS:
3605 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3606 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3607 base_op = "fiadd";
3608 else
3609 base_op = "fadd";
3610 break;
3612 case MINUS:
3613 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3614 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3615 base_op = "fisub";
3616 else
3617 base_op = "fsub";
3618 break;
3620 case MULT:
3621 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3622 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3623 base_op = "fimul";
3624 else
3625 base_op = "fmul";
3626 break;
3628 case DIV:
3629 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3630 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3631 base_op = "fidiv";
3632 else
3633 base_op = "fdiv";
3634 break;
3636 default:
3637 abort ();
3640 strcpy (buf, base_op);
3642 switch (GET_CODE (operands[3]))
3644 case MULT:
3645 case PLUS:
3646 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3648 temp = operands[2];
3649 operands[2] = operands[1];
3650 operands[1] = temp;
3653 if (GET_CODE (operands[2]) == MEM)
3654 return strcat (buf, AS1 (%z2,%2));
3656 if (NON_STACK_REG_P (operands[1]))
3658 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3659 RET;
3661 else if (NON_STACK_REG_P (operands[2]))
3663 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3664 RET;
3667 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3668 return strcat (buf, AS2 (p,%2,%0));
3670 if (STACK_TOP_P (operands[0]))
3671 return strcat (buf, AS2C (%y2,%0));
3672 else
3673 return strcat (buf, AS2C (%2,%0));
3675 case MINUS:
3676 case DIV:
3677 if (GET_CODE (operands[1]) == MEM)
3678 return strcat (buf, AS1 (r%z1,%1));
3680 if (GET_CODE (operands[2]) == MEM)
3681 return strcat (buf, AS1 (%z2,%2));
3683 if (NON_STACK_REG_P (operands[1]))
3685 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3686 RET;
3688 else if (NON_STACK_REG_P (operands[2]))
3690 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3691 RET;
3694 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3695 abort ();
3697 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3698 return strcat (buf, AS2 (rp,%2,%0));
3700 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3701 return strcat (buf, AS2 (p,%1,%0));
3703 if (STACK_TOP_P (operands[0]))
3705 if (STACK_TOP_P (operands[1]))
3706 return strcat (buf, AS2C (%y2,%0));
3707 else
3708 return strcat (buf, AS2 (r,%y1,%0));
3710 else if (STACK_TOP_P (operands[1]))
3711 return strcat (buf, AS2C (%1,%0));
3712 else
3713 return strcat (buf, AS2 (r,%2,%0));
3715 default:
3716 abort ();
3720 /* Output code for INSN to convert a float to a signed int. OPERANDS
3721 are the insn operands. The output may be SFmode or DFmode and the
3722 input operand may be SImode or DImode. As a special case, make sure
3723 that the 387 stack top dies if the output mode is DImode, because the
3724 hardware requires this. */
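/* Sketch of the sequence emitted below (cw and cw_trunc stand for the
   control-word memory slots operands[2] and operands[3]; %eax stands
   for the scratch register operands[4]; AT&T syntax assumed):

	fnstcw cw		save the current FPU control word
	movl cw,%eax
	movb $12,%ah		rounding control <- 11 (round toward zero)
	movl %eax,cw_trunc
	fldcw cw_trunc		switch to truncating rounding
	fistp dest		store the integer (fist if the stack top survives)
	fldcw cw		restore the original control word  */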
3726 char *
3727 output_fix_trunc (insn, operands)
3728 rtx insn;
3729 rtx *operands;
3731 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3732 rtx xops[2];
3734 if (! STACK_TOP_P (operands[1]) ||
3735 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
3736 abort ();
3738 xops[0] = GEN_INT (12);
3739 xops[1] = operands[4];
3741 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3742 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3743 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3744 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3745 output_asm_insn (AS1 (fldc%W3,%3), operands);
3747 if (NON_STACK_REG_P (operands[0]))
3748 output_to_reg (operands[0], stack_top_dies, operands[3]);
3749 else if (GET_CODE (operands[0]) == MEM)
3751 if (stack_top_dies)
3752 output_asm_insn (AS1 (fistp%z0,%0), operands);
3753 else
3754 output_asm_insn (AS1 (fist%z0,%0), operands);
3756 else
3757 abort ();
3759 return AS1 (fldc%W2,%2);
3762 /* Output code for INSN to compare OPERANDS. The two operands might
3763 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3764 expression. If the compare is in mode CCFPEQmode, use an opcode that
3765 will not fault if a qNaN is present. */
3767 char *
3768 output_float_compare (insn, operands)
3769 rtx insn;
3770 rtx *operands;
3772 int stack_top_dies;
3773 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3774 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
3775 rtx tmp;
3777 if (TARGET_CMOVE && STACK_REG_P (operands[1]))
3779 cc_status.flags |= CC_FCOMI;
3780 cc_prev_status.flags &= ~CC_TEST_AX;
3783 if (! STACK_TOP_P (operands[0]))
3785 tmp = operands[0];
3786 operands[0] = operands[1];
3787 operands[1] = tmp;
3788 cc_status.flags |= CC_REVERSED;
3791 if (! STACK_TOP_P (operands[0]))
3792 abort ();
3794 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3796 if (STACK_REG_P (operands[1])
3797 && stack_top_dies
3798 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3799 && REGNO (operands[1]) != FIRST_STACK_REG)
3801 /* If both operands die -- the top of the 387 stack and another
3802 stack register -- then this must be a `fcompp' float compare. */
3805 if (unordered_compare)
3806 if (cc_status.flags & CC_FCOMI)
3808 output_asm_insn (AS2 (fucomip,%y1,%0), operands);
3809 output_asm_insn (AS1 (fstp, %y0), operands);
3810 RET;
3812 else
3813 output_asm_insn ("fucompp", operands);
3814 else
3816 if (cc_status.flags & CC_FCOMI)
3818 output_asm_insn (AS2 (fcomip, %y1,%0), operands);
3819 output_asm_insn (AS1 (fstp, %y0), operands);
3820 RET;
3822 else
3823 output_asm_insn ("fcompp", operands);
3826 else
3828 static char buf[100];
3830 /* Decide if this is the integer or float compare opcode, or the
3831 unordered float compare. */
3833 if (unordered_compare)
3834 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
3835 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3836 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
3837 else
3838 strcpy (buf, "ficom");
3840 /* Modify the opcode if the 387 stack is to be popped. */
3842 if (stack_top_dies)
3843 strcat (buf, "p");
3845 if (NON_STACK_REG_P (operands[1]))
3846 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3847 else if (cc_status.flags & CC_FCOMI)
3849 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
3850 RET;
3852 else
3853 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3856 /* Now retrieve the condition code. */
3858 return output_fp_cc0_set (insn);
3861 /* Output opcodes to transfer the results of FP compare or test INSN
3862 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3863 result of the compare or test is unordered, no comparison operator
3864 succeeds except NE. Return an output template, if any. */
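/* After the `fnstsw %ax' below, the FPU condition bits land in %ah:
   C0 = 0x01, C2 = 0x04, C3 = 0x40 (0x45 masks all three).  An ordinary
   compare leaves them all clear for "greater", sets C0 for "less",
   C3 for "equal", and all three for "unordered"; that is why masks like
   0x45, 0x05 and 0x44 appear in the tests below.  The non-IEEE path
   instead uses `sahf', which copies C0 into CF and C3 into ZF so the
   ordinary unsigned branch conditions apply.  */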
3866 char *
3867 output_fp_cc0_set (insn)
3868 rtx insn;
3870 rtx xops[3];
3871 rtx unordered_label;
3872 rtx next;
3873 enum rtx_code code;
3875 xops[0] = gen_rtx (REG, HImode, 0);
3876 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3878 if (! TARGET_IEEE_FP)
3880 if (!(cc_status.flags & CC_REVERSED))
3882 next = next_cc0_user (insn);
3884 if (GET_CODE (next) == JUMP_INSN
3885 && GET_CODE (PATTERN (next)) == SET
3886 && SET_DEST (PATTERN (next)) == pc_rtx
3887 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3889 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3891 else if (GET_CODE (PATTERN (next)) == SET)
3893 code = GET_CODE (SET_SRC (PATTERN (next)));
3895 else
3897 return "sahf";
3899 if (code == GT || code == LT || code == EQ || code == NE
3900 || code == LE || code == GE)
3901 { /* We will test eax directly */
3902 cc_status.flags |= CC_TEST_AX;
3903 RET;
3906 return "sahf";
3909 next = next_cc0_user (insn);
3910 if (next == NULL_RTX)
3911 abort ();
3913 if (GET_CODE (next) == JUMP_INSN
3914 && GET_CODE (PATTERN (next)) == SET
3915 && SET_DEST (PATTERN (next)) == pc_rtx
3916 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3918 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3920 else if (GET_CODE (PATTERN (next)) == SET)
3922 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3923 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3924 else code = GET_CODE (SET_SRC (PATTERN (next)));
3926 else if (GET_CODE (PATTERN (next)) == PARALLEL
3927 && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
3929 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
3930 code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
3931 else code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
3933 else
3934 abort ();
3936 xops[0] = gen_rtx (REG, QImode, 0);
3938 switch (code)
3940 case GT:
3941 xops[1] = GEN_INT (0x45);
3942 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3943 /* je label */
3944 break;
3946 case LT:
3947 xops[1] = GEN_INT (0x45);
3948 xops[2] = GEN_INT (0x01);
3949 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3950 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3951 /* je label */
3952 break;
3954 case GE:
3955 xops[1] = GEN_INT (0x05);
3956 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3957 /* je label */
3958 break;
3960 case LE:
3961 xops[1] = GEN_INT (0x45);
3962 xops[2] = GEN_INT (0x40);
3963 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3964 output_asm_insn (AS1 (dec%B0,%h0), xops);
3965 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3966 /* jb label */
3967 break;
3969 case EQ:
3970 xops[1] = GEN_INT (0x45);
3971 xops[2] = GEN_INT (0x40);
3972 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3973 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3974 /* je label */
3975 break;
3977 case NE:
3978 xops[1] = GEN_INT (0x44);
3979 xops[2] = GEN_INT (0x40);
3980 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3981 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
3982 /* jne label */
3983 break;
3985 case GTU:
3986 case LTU:
3987 case GEU:
3988 case LEU:
3989 default:
3990 abort ();
3992 RET;
3995 #define MAX_386_STACK_LOCALS 2
3997 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3999 /* Define the structure for the machine field in struct function. */
4000 struct machine_function
4002 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
4005 /* Functions to save and restore i386_stack_locals.
4006 These will be called, via pointer variables,
4007 from push_function_context and pop_function_context. */
4009 void
4010 save_386_machine_status (p)
4011 struct function *p;
4013 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
4014 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
4015 sizeof i386_stack_locals);
4018 void
4019 restore_386_machine_status (p)
4020 struct function *p;
4022 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
4023 sizeof i386_stack_locals);
4024 free (p->machine);
4027 /* Clear stack slot assignments remembered from previous functions.
4028 This is called from INIT_EXPANDERS once before RTL is emitted for each
4029 function. */
4031 void
4032 clear_386_stack_locals ()
4034 enum machine_mode mode;
4035 int n;
4037 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4038 mode = (enum machine_mode) ((int) mode + 1))
4039 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4040 i386_stack_locals[(int) mode][n] = NULL_RTX;
4042 /* Arrange to save and restore i386_stack_locals around nested functions. */
4043 save_machine_status = save_386_machine_status;
4044 restore_machine_status = restore_386_machine_status;
4047 /* Return a MEM corresponding to a stack slot with mode MODE.
4048 Allocate a new slot if necessary.
4050 The RTL for a function can have several slots available: N is
4051 which slot to use. */
4054 assign_386_stack_local (mode, n)
4055 enum machine_mode mode;
4056 int n;
4058 if (n < 0 || n >= MAX_386_STACK_LOCALS)
4059 abort ();
4061 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4062 i386_stack_locals[(int) mode][n]
4063 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4065 return i386_stack_locals[(int) mode][n];
4069 int is_mul(op,mode)
4070 register rtx op;
4071 enum machine_mode mode;
4073 return (GET_CODE (op) == MULT);
4076 int is_div(op,mode)
4077 register rtx op;
4078 enum machine_mode mode;
4080 return (GET_CODE (op) == DIV);
4084 #ifdef NOTYET
4085 /* Create a new copy of an rtx.
4086 Recursively copies the operands of the rtx,
4087 except for those few rtx codes that are sharable.
4088 Doesn't share CONST */
4091 copy_all_rtx (orig)
4092 register rtx orig;
4094 register rtx copy;
4095 register int i, j;
4096 register RTX_CODE code;
4097 register char *format_ptr;
4099 code = GET_CODE (orig);
4101 switch (code)
4103 case REG:
4104 case QUEUED:
4105 case CONST_INT:
4106 case CONST_DOUBLE:
4107 case SYMBOL_REF:
4108 case CODE_LABEL:
4109 case PC:
4110 case CC0:
4111 case SCRATCH:
4112 /* SCRATCH must be shared because they represent distinct values. */
4113 return orig;
4115 #if 0
4116 case CONST:
4117 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4118 a LABEL_REF, it isn't sharable. */
4119 if (GET_CODE (XEXP (orig, 0)) == PLUS
4120 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4121 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4122 return orig;
4123 break;
4124 #endif
4125 /* A MEM with a constant address is not sharable. The problem is that
4126 the constant address may need to be reloaded. If the mem is shared,
4127 then reloading one copy of this mem will cause all copies to appear
4128 to have been reloaded. */
4131 copy = rtx_alloc (code);
4132 PUT_MODE (copy, GET_MODE (orig));
4133 copy->in_struct = orig->in_struct;
4134 copy->volatil = orig->volatil;
4135 copy->unchanging = orig->unchanging;
4136 copy->integrated = orig->integrated;
4137 /* intel1 */
4138 copy->is_spill_rtx = orig->is_spill_rtx;
4140 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4142 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4144 switch (*format_ptr++)
4146 case 'e':
4147 XEXP (copy, i) = XEXP (orig, i);
4148 if (XEXP (orig, i) != NULL)
4149 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
4150 break;
4152 case '0':
4153 case 'u':
4154 XEXP (copy, i) = XEXP (orig, i);
4155 break;
4157 case 'E':
4158 case 'V':
4159 XVEC (copy, i) = XVEC (orig, i);
4160 if (XVEC (orig, i) != NULL)
4162 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4163 for (j = 0; j < XVECLEN (copy, i); j++)
4164 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
4166 break;
4168 case 'w':
4169 XWINT (copy, i) = XWINT (orig, i);
4170 break;
4172 case 'i':
4173 XINT (copy, i) = XINT (orig, i);
4174 break;
4176 case 's':
4177 case 'S':
4178 XSTR (copy, i) = XSTR (orig, i);
4179 break;
4181 default:
4182 abort ();
4185 return copy;
4189 /* try to rewrite a memory address to make it valid */
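/* For example, (plus (reg) (plus (reg) (const_int 8))) is not in
   canonical form; the first transformation below reassociates it into
   (plus (plus (reg) (reg)) (const_int 8)), which memory_address_p
   accepts and which prints as 8(base,index).  */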
4190 void
4191 rewrite_address (mem_rtx)
4192 rtx mem_rtx;
4194 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4195 int scale = 1;
4196 int offset_adjust = 0;
4197 int was_only_offset = 0;
4198 rtx mem_addr = XEXP (mem_rtx, 0);
4199 char *storage = (char *) oballoc (0);
4200 int in_struct = 0;
4201 int is_spill_rtx = 0;
4203 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4204 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
4206 if (GET_CODE (mem_addr) == PLUS &&
4207 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
4208 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4209 { /* this part is utilized by the combiner */
4210 ret_rtx =
4211 gen_rtx (PLUS, GET_MODE (mem_addr),
4212 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4213 XEXP (mem_addr, 0),
4214 XEXP (XEXP (mem_addr, 1), 0)),
4215 XEXP (XEXP (mem_addr, 1), 1));
4216 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4218 XEXP (mem_rtx, 0) = ret_rtx;
4219 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4220 return;
4222 obfree (storage);
4225 /* this part is utilized by loop.c */
4226 /* If the address contains a PLUS (reg, const) that is not valid
4227 in this context, try to rewrite the address to make it valid.  intel1 */
4229 storage = (char *) oballoc (0);
4230 index_rtx = base_rtx = offset_rtx = NULL;
4231 /* find the base index and offset elements of the memory address */
4232 if (GET_CODE (mem_addr) == PLUS)
4234 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4236 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4238 base_rtx = XEXP (mem_addr, 1);
4239 index_rtx = XEXP (mem_addr, 0);
4241 else
4243 base_rtx = XEXP (mem_addr, 0);
4244 offset_rtx = XEXP (mem_addr, 1);
4247 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4249 index_rtx = XEXP (mem_addr, 0);
4250 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4252 base_rtx = XEXP (mem_addr, 1);
4254 else
4256 offset_rtx = XEXP (mem_addr, 1);
4259 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
4261 /* intel1 */
4262 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
4263 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
4264 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
4265 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
4266 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
4267 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
4268 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4270 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4271 offset_rtx = XEXP (mem_addr, 1);
4272 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4273 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4275 else
4277 offset_rtx = XEXP (mem_addr, 1);
4278 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4279 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4282 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4284 was_only_offset = 1;
4285 index_rtx = NULL;
4286 base_rtx = NULL;
4287 offset_rtx = XEXP (mem_addr, 1);
4288 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4289 if (offset_adjust == 0)
4291 XEXP (mem_rtx, 0) = offset_rtx;
4292 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4293 return;
4296 else
4298 obfree (storage);
4299 return;
4302 else if (GET_CODE (mem_addr) == MULT)
4304 index_rtx = mem_addr;
4306 else
4308 obfree (storage);
4309 return;
4311 if (index_rtx && GET_CODE (index_rtx) == MULT)
4313 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4315 obfree (storage);
4316 return;
4318 scale_rtx = XEXP (index_rtx, 1);
4319 scale = INTVAL (scale_rtx);
4320 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4322 /* now find which of the elements are invalid and try to fix them */
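/* For instance, when the index has collapsed to a CONST_INT and there is
   no base register, the whole index*scale product is folded into the
   constant offset just below.  */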
4323 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4325 offset_adjust = INTVAL (index_rtx) * scale;
4326 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4327 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4329 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4330 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4332 offset_rtx = copy_all_rtx (offset_rtx);
4333 XEXP (XEXP (offset_rtx, 0), 1) =
4334 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4335 if (!CONSTANT_P (offset_rtx))
4337 obfree (storage);
4338 return;
4342 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4344 offset_rtx =
4345 gen_rtx (CONST, GET_MODE (offset_rtx),
4346 gen_rtx (PLUS, GET_MODE (offset_rtx),
4347 offset_rtx,
4348 gen_rtx (CONST_INT, 0, offset_adjust)));
4349 if (!CONSTANT_P (offset_rtx))
4351 obfree (storage);
4352 return;
4355 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4357 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4359 else if (!offset_rtx)
4361 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4363 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4364 XEXP (mem_rtx, 0) = offset_rtx;
4365 return;
4367 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4368 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4369 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4371 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4372 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4374 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4376 offset_adjust += INTVAL (base_rtx);
4377 base_rtx = NULL;
4379 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4380 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4381 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4383 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4384 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4386 if (index_rtx)
4388 if (!LEGITIMATE_INDEX_P (index_rtx)
4389 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4391 obfree (storage);
4392 return;
4395 if (base_rtx)
4397 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4399 obfree (storage);
4400 return;
4403 if (offset_adjust != 0)
4405 if (offset_rtx)
4407 if (GET_CODE (offset_rtx) == CONST &&
4408 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4410 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4411 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4413 offset_rtx = copy_all_rtx (offset_rtx);
4414 XEXP (XEXP (offset_rtx, 0), 1) =
4415 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4416 if (!CONSTANT_P (offset_rtx))
4418 obfree (storage);
4419 return;
4423 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4425 offset_rtx =
4426 gen_rtx (CONST, GET_MODE (offset_rtx),
4427 gen_rtx (PLUS, GET_MODE (offset_rtx),
4428 offset_rtx,
4429 gen_rtx (CONST_INT, 0, offset_adjust)));
4430 if (!CONSTANT_P (offset_rtx))
4432 obfree (storage);
4433 return;
4436 else if (GET_CODE (offset_rtx) == CONST_INT)
4438 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4440 else
4442 obfree (storage);
4443 return;
4446 else
4448 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
4450 if (index_rtx)
4452 if (base_rtx)
4454 if (scale != 1)
4456 if (GET_CODE (offset_rtx) == CONST_INT &&
4457 INTVAL (offset_rtx) == 0)
4459 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4460 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4461 scale_rtx),
4462 base_rtx);
4464 else
4466 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4467 gen_rtx (PLUS, GET_MODE (base_rtx),
4468 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4469 scale_rtx),
4470 base_rtx),
4471 offset_rtx);
4474 else
4476 if (GET_CODE (offset_rtx) == CONST_INT &&
4477 INTVAL (offset_rtx) == 0)
4479 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4481 else
4483 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4484 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
4485 base_rtx),
4486 offset_rtx);
4490 else
4492 if (scale != 1)
4494 if (GET_CODE (offset_rtx) == CONST_INT &&
4495 INTVAL (offset_rtx) == 0)
4497 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4499 else
4501 ret_rtx =
4502 gen_rtx (PLUS, GET_MODE (offset_rtx),
4503 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4504 scale_rtx),
4505 offset_rtx);
4508 else
4510 if (GET_CODE (offset_rtx) == CONST_INT &&
4511 INTVAL (offset_rtx) == 0)
4513 ret_rtx = index_rtx;
4515 else
4517 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
4522 else
4524 if (base_rtx)
4526 if (GET_CODE (offset_rtx) == CONST_INT &&
4527 INTVAL (offset_rtx) == 0)
4529 ret_rtx = base_rtx;
4531 else
4533 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4536 else if (was_only_offset)
4538 ret_rtx = offset_rtx;
4540 else
4542 obfree (storage);
4543 return;
4546 XEXP (mem_rtx, 0) = ret_rtx;
4547 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4548 return;
4550 else
4552 obfree (storage);
4553 return;
4556 #endif /* NOTYET */
4559 /* Return 1 if the first insn to set the condition code before INSN also sets
4560 the register REG_RTX; otherwise return 0.  */
4562 last_to_set_cc (reg_rtx, insn)
4563 rtx reg_rtx, insn;
4565 rtx prev_insn = PREV_INSN (insn);
4567 while (prev_insn)
4569 if (GET_CODE (prev_insn) == NOTE)
4572 else if (GET_CODE (prev_insn) == INSN)
4574 if (GET_CODE (PATTERN (prev_insn)) != SET)
4575 return (0);
4577 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4579 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
4580 return (1);
4582 return (0);
4585 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4586 return (0);
4589 else
4590 return (0);
4592 prev_insn = PREV_INSN (prev_insn);
4595 return (0);
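/* Return 1 if evaluating PAT (a SET_SRC) leaves the condition code
   untouched, i.e. it is a plain REG or MEM reference; return 0 for
   anything else.  */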
4600 doesnt_set_condition_code (pat)
4601 rtx pat;
4603 switch (GET_CODE (pat))
4605 case MEM:
4606 case REG:
4607 return (1);
4609 default:
4610 return (0);
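/* Return 1 if computing PAT sets the condition code as a side effect,
   as the i386 arithmetic and logical operations do; return 0 otherwise.  */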
4617 sets_condition_code (pat)
4618 rtx pat;
4620 switch (GET_CODE (pat))
4622 case PLUS:
4623 case MINUS:
4624 case AND:
4625 case IOR:
4626 case XOR:
4627 case NOT:
4628 case NEG:
4629 case MULT:
4630 case DIV:
4631 case MOD:
4632 case UDIV:
4633 case UMOD:
4634 return (1);
4636 default:
4637 return (0);
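/* Return 1 if OP is a CONST_INT between 0 and 32 inclusive, 0 otherwise
   (presumably a small length or alignment operand used by the string
   patterns in i386.md).  */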
4644 str_immediate_operand (op, mode)
4645 register rtx op;
4646 enum machine_mode mode;
4648 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4650 return (1);
4652 return (0);
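/* Return 1 if INSN is a single SET whose destination has floating-point
   mode (SFmode, DFmode or XFmode), 0 otherwise.  */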
4657 is_fp_insn (insn)
4658 rtx insn;
4660 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4661 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4662 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4663 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4665 return (1);
4668 return (0);
4672 /* Return 1 if the mode of the SET_DEST of insn is floating point
4673 and it is not an fld or a move from memory to memory.
4674 Otherwise return 0.  */
4676 is_fp_dest (insn)
4677 rtx insn;
4679 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4680 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4681 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4682 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4683 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4684 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4685 && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
4687 return (1);
4690 return (0);
4694 /* Return 1 if the mode of the SET_DEST is floating point, the destination
4695 is memory, and the source is a register.  Otherwise return 0.  */
4698 is_fp_store (insn)
4699 rtx insn;
4701 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4702 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4703 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4704 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4705 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4706 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4708 return (1);
4711 return (0);
4716 /* Return 1 if dep_insn sets a register which insn uses as a base
4717 or index to reference memory.
4718 Otherwise return 0.  */
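/* On the Pentium, a register written by one insn and used for address
   generation by the next causes an AGI (address generation interlock)
   stall, which is presumably what this test helps the scheduler avoid.
   A push modifies %esp, so a push followed by an insn that addresses
   memory through %esp is treated the same way below.  */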
4721 agi_dependent (insn, dep_insn)
4722 rtx insn, dep_insn;
4724 if (GET_CODE (dep_insn) == INSN
4725 && GET_CODE (PATTERN (dep_insn)) == SET
4726 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4728 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
4731 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4732 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4733 && push_operand (SET_DEST (PATTERN (dep_insn)),
4734 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4736 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4739 return (0);
4744 /* Return 1 if reg is used in rtl as a base or index for a memory ref;
4745 otherwise return 0.  */
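/* For example, if INSN is (set (reg A) (mem (plus (reg B) (const_int 4)))),
   then reg_mentioned_in_mem (reg B, insn) returns 1 because reg B occurs
   inside a MEM address, while reg A does not, so the result for reg A is 0.  */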
4748 reg_mentioned_in_mem (reg, rtl)
4749 rtx reg, rtl;
4751 register char *fmt;
4752 register int i;
4753 register enum rtx_code code;
4755 if (rtl == NULL)
4756 return (0);
4758 code = GET_CODE (rtl);
4760 switch (code)
4762 case HIGH:
4763 case CONST_INT:
4764 case CONST:
4765 case CONST_DOUBLE:
4766 case SYMBOL_REF:
4767 case LABEL_REF:
4768 case PC:
4769 case CC0:
4770 case SUBREG:
4771 return (0);
4776 if (code == MEM && reg_mentioned_p (reg, rtl))
4777 return (1);
4779 fmt = GET_RTX_FORMAT (code);
4780 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4782 if (fmt[i] == 'E')
4784 register int j;
4785 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4787 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4788 return 1;
4792 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4793 return 1;
4796 return (0);
4799 /* Output the appropriate insns for doing strlen when not simply using repnz; scasb
4801 operands[0] = result, initialized with the start address
4802 operands[1] = alignment of the address.
4803 operands[2] = scratch register, initialized with the start address when
4804 not aligned, otherwise undefined
4806 This is just the body.  It needs the initialisations mentioned above and
4807 some address computation at the end.  These things are done in i386.md.  */
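/* Very roughly, the emitted sequence looks like this (an illustration only;
   the real register names, label numbers and alignment prologue depend on
   the operands and target flags):

       andl  $3,%scratch         ; prologue: step one byte at a time,
       je    .L8                 ; checking for the terminating zero,
       ...                       ; until the pointer is 4-byte aligned
   .L8:
       movl  (%result),%scratch  ; main loop: load 4 bytes
       ...                       ; test each byte of %scratch for zero
       addl  $4,%result
       testl $0xff000000,%scratch
       jne   .L8
       subl  $3,%result          ; fixups below adjust %result so that it
   .L10: incl  %result           ; ends up pointing at the zero byte
   .L11: incl  %result
   .L12:                                                                  */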
4809 char *
4810 output_strlen_unroll (operands)
4811 rtx operands[];
4813 rtx xops[18];
4815 xops[0] = operands[0]; /* Result */
4816 /* operands[1]; * Alignment */
4817 xops[1] = operands[2]; /* Scratch */
4818 xops[2] = GEN_INT (0);
4819 xops[3] = GEN_INT (2);
4820 xops[4] = GEN_INT (3);
4821 xops[5] = GEN_INT (4);
4822 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4823 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4824 xops[8] = gen_label_rtx (); /* label of main loop */
4825 if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4826 xops[9] = gen_label_rtx (); /* pentium optimisation */
4827 xops[10] = gen_label_rtx (); /* end label 2 */
4828 xops[11] = gen_label_rtx (); /* end label 1 */
4829 xops[12] = gen_label_rtx (); /* end label */
4830 /* xops[13] * Temporary used */
4831 xops[14] = GEN_INT (0xff);
4832 xops[15] = GEN_INT (0xff00);
4833 xops[16] = GEN_INT (0xff0000);
4834 xops[17] = GEN_INT (0xff000000);
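/* xops[14]..xops[17] are byte masks: each selects one byte of the 32-bit
   word loaded in the main loop, so testing the word against a mask checks
   whether the corresponding byte is zero.  */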
4836 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4839 /* is there a known alignment and is it less than 4 */
4839 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4841 /* is there a known alignment and is it not 2 */
4842 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4844 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4845 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4847 /* leave just the two lower bits */
4848 /* if this is a q-register, then the high part is used later */
4849 /* therefore use andl rather than andb */
4850 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4851 /* the address is 4-byte aligned when the result is zero */
4852 output_asm_insn (AS1 (je,%l8), xops);
4853 /* as a side effect, the andl above sets even parity when the result is 3 */
4854 output_asm_insn (AS1 (jp,%6), xops);
4856 /* is it aligned to 2 bytes? */
4858 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4861 output_asm_insn (AS1 (je,%7), xops);
4863 else
4865 /* since the alignment is 2, we have to check 2 or 0 bytes */
4867 /* check whether the address is 4-byte aligned */
4868 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4869 /* the address is 4-byte aligned when the result is zero */
4870 output_asm_insn (AS1 (je,%l8), xops);
4873 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4874 /* now, compare the bytes */
4875 /* comparing with the high part of a q-register gives shorter code */
4876 if (QI_REG_P (xops[1]))
4878 /* compare the first n unaligned bytes on a byte-by-byte basis */
4879 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4880 /* when zero, we have reached the end */
4881 output_asm_insn (AS1 (je,%l12), xops);
4882 /* increment the address */
4883 output_asm_insn (AS1 (inc%L0,%0), xops);
4885 /* not needed with an alignment of 2 */
4886 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4888 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4889 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4890 output_asm_insn (AS1 (je,%l12), xops);
4891 output_asm_insn (AS1 (inc%L0,%0), xops);
4893 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4895 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4897 else
4899 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4900 output_asm_insn (AS1 (je,%l12), xops);
4901 output_asm_insn (AS1 (inc%L0,%0), xops);
4903 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4904 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4905 output_asm_insn (AS1 (je,%l12), xops);
4906 output_asm_insn (AS1 (inc%L0,%0), xops);
4908 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4909 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4911 output_asm_insn (AS1 (je,%l12), xops);
4912 output_asm_insn (AS1 (inc%L0,%0), xops);
4915 /* Generate loop to check 4 bytes at a time */
4916 /* It is not a good idea to align this loop: it only makes the */
4917 /* program larger and does not help to speed it up. */
4918 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4919 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
4921 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4922 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4924 if (QI_REG_P (xops[1]))
4926 /* On the i586 it is faster to combine the high and low parts as
4927 a kind of lookahead: if ANDing them yields zero, then one
4928 of them *could* be zero, otherwise neither is zero;
4929 this saves one instruction.  On the i486 this is slower
4930 (tested with a P-90, an i486DX2-66 and an AMD 486DX2-66). */
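/* e.g. testb %bh,%bl / jne L9: if the AND of the two bytes is nonzero,
   neither byte can be zero, so the two individual byte tests below are
   skipped (a sketch; the actual operands come from xops[1]).  */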
4931 if (TARGET_PENTIUM)
4933 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4934 output_asm_insn (AS1 (jne,%l9), xops);
4937 /* check first byte */
4938 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4939 output_asm_insn (AS1 (je,%l12), xops);
4941 /* check second byte */
4942 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4943 output_asm_insn (AS1 (je,%l11), xops);
4945 if (TARGET_PENTIUM)
4946 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
4948 else
4950 /* check first byte */
4951 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4952 output_asm_insn (AS1 (je,%l12), xops);
4954 /* check second byte */
4955 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4956 output_asm_insn (AS1 (je,%l11), xops);
4959 /* check third byte */
4960 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4961 output_asm_insn (AS1 (je,%l10), xops);
4963 /* check fourth byte and increment address */
4964 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4965 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4966 output_asm_insn (AS1 (jne,%l8), xops);
4968 /* now generate fixups when the compare stops within a 4-byte word */
4969 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4971 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4972 output_asm_insn (AS1 (inc%L0,%0), xops);
4974 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4975 output_asm_insn (AS1 (inc%L0,%0), xops);
4977 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
4979 RET;