* c-decl.c (grokdeclarator): Use ISO word.
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blobccb46d71d364a6a0641cffc2c2d8a58641240e95
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "recog.h"
34 #include "obstack.h"
35 #include "tree.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "except.h"
39 #include "function.h"
40 #include "output.h"
41 #include "basic-block.h"
42 #include "integrate.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "hashtab.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
51 #ifndef TARGET_NO_PROTOTYPE
52 #define TARGET_NO_PROTOTYPE 0
53 #endif
55 extern int profile_block_flag;
57 #define min(A,B) ((A) < (B) ? (A) : (B))
58 #define max(A,B) ((A) > (B) ? (A) : (B))
60 /* Target cpu type */
62 enum processor_type rs6000_cpu;
63 struct rs6000_cpu_select rs6000_select[3] =
65 /* switch name, tune arch */
66 { (const char *)0, "--with-cpu=", 1, 1 },
67 { (const char *)0, "-mcpu=", 1, 1 },
68 { (const char *)0, "-mtune=", 1, 0 },
71 /* Size of long double */
72 const char *rs6000_long_double_size_string;
73 int rs6000_long_double_type_size;
75 /* Whether -mabi=altivec has appeared */
76 int rs6000_altivec_abi;
78 /* Set to non-zero once AIX common-mode calls have been defined. */
79 static int common_mode_defined;
81 /* Save information from a "cmpxx" operation until the branch or scc is
82 emitted. */
83 rtx rs6000_compare_op0, rs6000_compare_op1;
84 int rs6000_compare_fp_p;
86 /* Label number of label created for -mrelocatable, to call to so we can
87 get the address of the GOT section */
88 int rs6000_pic_labelno;
90 #ifdef USING_ELFOS_H
91 /* Which abi to adhere to */
92 const char *rs6000_abi_name = RS6000_ABI_NAME;
94 /* Semantics of the small data area */
95 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
97 /* Which small data model to use */
98 const char *rs6000_sdata_name = (char *)0;
100 /* Counter for labels which are to be placed in .fixup. */
101 int fixuplabelno = 0;
102 #endif
104 /* ABI enumeration available for subtarget to use. */
105 enum rs6000_abi rs6000_current_abi;
107 /* ABI string from -mabi= option. */
108 const char *rs6000_abi_string;
110 /* Debug flags */
111 const char *rs6000_debug_name;
112 int rs6000_debug_stack; /* debug stack applications */
113 int rs6000_debug_arg; /* debug argument handling */
115 /* Flag to say the TOC is initialized */
116 int toc_initialized;
117 char toc_label_name[10];
119 /* Alias set for saves and restores from the rs6000 stack. */
120 static int rs6000_sr_alias_set;
122 static void rs6000_add_gc_roots PARAMS ((void));
123 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
124 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
125 static void validate_condition_mode
126 PARAMS ((enum rtx_code, enum machine_mode));
127 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
128 static void rs6000_maybe_dead PARAMS ((rtx));
129 static void rs6000_emit_stack_tie PARAMS ((void));
130 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
131 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
132 static unsigned rs6000_hash_constant PARAMS ((rtx));
133 static unsigned toc_hash_function PARAMS ((const void *));
134 static int toc_hash_eq PARAMS ((const void *, const void *));
135 static int toc_hash_mark_entry PARAMS ((void **, void *));
136 static void toc_hash_mark_table PARAMS ((void *));
137 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
138 static void rs6000_free_machine_status PARAMS ((struct function *));
139 static void rs6000_init_machine_status PARAMS ((struct function *));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
147 #if TARGET_ELF
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
149 int));
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
152 #endif
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
155 #endif
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static void rs6000_parse_abi_options PARAMS ((void));
167 static int first_altivec_reg_to_save PARAMS ((void));
168 static unsigned int compute_vrsave_mask PARAMS ((void));
169 static void is_altivec_return_reg PARAMS ((rtx, void *));
170 int vrsave_operation PARAMS ((rtx, enum machine_mode));
171 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *));
/* Default register names, indexed by hard register number.  */
char rs6000_reg_names[][8] =
{
  /* General-purpose registers.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  /* Floating-point registers.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  /* MQ, link, count, and arg-pointer registers.  */
  "mq", "lr", "ctr", "ap",
  /* Condition registers.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "xer",
  /* AltiVec registers.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "vrsave"
};
#ifdef TARGET_REGNAMES
/* Alternate register names (with %-prefixes), used when -mregnames is
   given; copied over rs6000_reg_names in rs6000_override_options.  */
static const char alt_reg_names[][8] =
{
  "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
  "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
  "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
  "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
  "mq", "lr", "ctr", "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
  "xer",
  /* AltiVec registers.  */
  "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
  "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "%vrsave"
};
#endif
218 #ifndef MASK_STRICT_ALIGN
219 #define MASK_STRICT_ALIGN 0
220 #endif
222 /* Initialize the GCC target structure. */
223 #undef TARGET_ATTRIBUTE_TABLE
224 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
226 #undef TARGET_ASM_FUNCTION_PROLOGUE
227 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
228 #undef TARGET_ASM_FUNCTION_EPILOGUE
229 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
231 #if TARGET_ELF
232 #undef TARGET_SECTION_TYPE_FLAGS
233 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
234 #endif
236 #undef TARGET_SCHED_ISSUE_RATE
237 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
238 #undef TARGET_SCHED_ADJUST_COST
239 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
240 #undef TARGET_SCHED_ADJUST_PRIORITY
241 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
243 #undef TARGET_INIT_BUILTINS
244 #define TARGET_INIT_BUILTINS rs6000_init_builtins
246 #undef TARGET_EXPAND_BUILTIN
247 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
249 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
250 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
252 struct gcc_target targetm = TARGET_INITIALIZER;
254 /* Override command line options. Mostly we process the processor
255 type and sometimes adjust other TARGET_ options. */
257 void
258 rs6000_override_options (default_cpu)
259 const char *default_cpu;
261 size_t i, j;
262 struct rs6000_cpu_select *ptr;
264 /* Simplify the entries below by making a mask for any POWER
265 variant and any PowerPC variant. */
267 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
268 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
269 | MASK_PPC_GFXOPT | MASK_POWERPC64)
270 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
272 static struct ptt
274 const char *const name; /* Canonical processor name. */
275 const enum processor_type processor; /* Processor type enum value. */
276 const int target_enable; /* Target flags to enable. */
277 const int target_disable; /* Target flags to disable. */
278 } const processor_target_table[]
279 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
280 POWER_MASKS | POWERPC_MASKS},
281 {"power", PROCESSOR_POWER,
282 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
283 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
284 {"power2", PROCESSOR_POWER,
285 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
286 POWERPC_MASKS | MASK_NEW_MNEMONICS},
287 {"power3", PROCESSOR_PPC630,
288 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
289 POWER_MASKS | MASK_PPC_GPOPT},
290 {"powerpc", PROCESSOR_POWERPC,
291 MASK_POWERPC | MASK_NEW_MNEMONICS,
292 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
293 {"powerpc64", PROCESSOR_POWERPC64,
294 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
295 POWER_MASKS | POWERPC_OPT_MASKS},
296 {"rios", PROCESSOR_RIOS1,
297 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
298 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
299 {"rios1", PROCESSOR_RIOS1,
300 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
301 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
302 {"rsc", PROCESSOR_PPC601,
303 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
304 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
305 {"rsc1", PROCESSOR_PPC601,
306 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
307 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
308 {"rios2", PROCESSOR_RIOS2,
309 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
310 POWERPC_MASKS | MASK_NEW_MNEMONICS},
311 {"rs64a", PROCESSOR_RS64A,
312 MASK_POWERPC | MASK_NEW_MNEMONICS,
313 POWER_MASKS | POWERPC_OPT_MASKS},
314 {"401", PROCESSOR_PPC403,
315 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
316 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
317 {"403", PROCESSOR_PPC403,
318 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
319 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
320 {"505", PROCESSOR_MPCCORE,
321 MASK_POWERPC | MASK_NEW_MNEMONICS,
322 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
323 {"601", PROCESSOR_PPC601,
324 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
325 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
326 {"602", PROCESSOR_PPC603,
327 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
328 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
329 {"603", PROCESSOR_PPC603,
330 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
331 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
332 {"603e", PROCESSOR_PPC603,
333 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
334 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
335 {"ec603e", PROCESSOR_PPC603,
336 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
337 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
338 {"604", PROCESSOR_PPC604,
339 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
340 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
341 {"604e", PROCESSOR_PPC604e,
342 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
343 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
344 {"620", PROCESSOR_PPC620,
345 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
346 POWER_MASKS | MASK_PPC_GPOPT},
347 {"630", PROCESSOR_PPC630,
348 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
349 POWER_MASKS | MASK_PPC_GPOPT},
350 {"740", PROCESSOR_PPC750,
351 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
352 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
353 {"750", PROCESSOR_PPC750,
354 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
355 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
356 {"7400", PROCESSOR_PPC7400,
357 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
358 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
359 {"7450", PROCESSOR_PPC7450,
360 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
361 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
362 {"801", PROCESSOR_MPCCORE,
363 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
364 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
365 {"821", PROCESSOR_MPCCORE,
366 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
367 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
368 {"823", PROCESSOR_MPCCORE,
369 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
370 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
371 {"860", PROCESSOR_MPCCORE,
372 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
373 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
375 size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
377 /* Save current -mmultiple/-mno-multiple status. */
378 int multiple = TARGET_MULTIPLE;
379 /* Save current -mstring/-mno-string status. */
380 int string = TARGET_STRING;
382 profile_block_flag = 0;
384 /* Identify the processor type. */
385 rs6000_select[0].string = default_cpu;
386 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
388 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
390 ptr = &rs6000_select[i];
391 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
393 for (j = 0; j < ptt_size; j++)
394 if (! strcmp (ptr->string, processor_target_table[j].name))
396 if (ptr->set_tune_p)
397 rs6000_cpu = processor_target_table[j].processor;
399 if (ptr->set_arch_p)
401 target_flags |= processor_target_table[j].target_enable;
402 target_flags &= ~processor_target_table[j].target_disable;
404 break;
407 if (j == ptt_size)
408 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
412 /* If we are optimizing big endian systems for space, use the store
413 multiple instructions. */
414 if (BYTES_BIG_ENDIAN && optimize_size)
415 target_flags |= MASK_MULTIPLE;
417 /* If -mmultiple or -mno-multiple was explicitly used, don't
418 override with the processor default */
419 if (TARGET_MULTIPLE_SET)
420 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
422 /* If -mstring or -mno-string was explicitly used, don't override
423 with the processor default. */
424 if (TARGET_STRING_SET)
425 target_flags = (target_flags & ~MASK_STRING) | string;
427 /* Don't allow -mmultiple or -mstring on little endian systems
428 unless the cpu is a 750, because the hardware doesn't support the
429 instructions used in little endian mode, and causes an alignment
430 trap. The 750 does not cause an alignment trap (except when the
431 target is unaligned). */
433 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
435 if (TARGET_MULTIPLE)
437 target_flags &= ~MASK_MULTIPLE;
438 if (TARGET_MULTIPLE_SET)
439 warning ("-mmultiple is not supported on little endian systems");
442 if (TARGET_STRING)
444 target_flags &= ~MASK_STRING;
445 if (TARGET_STRING_SET)
446 warning ("-mstring is not supported on little endian systems");
450 if (flag_pic && DEFAULT_ABI == ABI_AIX)
452 warning ("-f%s ignored (all code is position independent)",
453 (flag_pic > 1) ? "PIC" : "pic");
454 flag_pic = 0;
457 #ifdef XCOFF_DEBUGGING_INFO
458 if (flag_function_sections && (write_symbols != NO_DEBUG)
459 && DEFAULT_ABI == ABI_AIX)
461 warning ("-ffunction-sections disabled on AIX when debugging");
462 flag_function_sections = 0;
465 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
467 warning ("-fdata-sections not supported on AIX");
468 flag_data_sections = 0;
470 #endif
472 /* Set debug flags */
473 if (rs6000_debug_name)
475 if (! strcmp (rs6000_debug_name, "all"))
476 rs6000_debug_stack = rs6000_debug_arg = 1;
477 else if (! strcmp (rs6000_debug_name, "stack"))
478 rs6000_debug_stack = 1;
479 else if (! strcmp (rs6000_debug_name, "arg"))
480 rs6000_debug_arg = 1;
481 else
482 error ("unknown -mdebug-%s switch", rs6000_debug_name);
485 /* Set size of long double */
486 rs6000_long_double_type_size = 64;
487 if (rs6000_long_double_size_string)
489 char *tail;
490 int size = strtol (rs6000_long_double_size_string, &tail, 10);
491 if (*tail != '\0' || (size != 64 && size != 128))
492 error ("Unknown switch -mlong-double-%s",
493 rs6000_long_double_size_string);
494 else
495 rs6000_long_double_type_size = size;
498 /* Handle -mabi= options. */
499 rs6000_parse_abi_options ();
501 #ifdef TARGET_REGNAMES
502 /* If the user desires alternate register names, copy in the
503 alternate names now. */
504 if (TARGET_REGNAMES)
505 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
506 #endif
508 #ifdef SUBTARGET_OVERRIDE_OPTIONS
509 SUBTARGET_OVERRIDE_OPTIONS;
510 #endif
512 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
513 If -maix-struct-return or -msvr4-struct-return was explicitly
514 used, don't override with the ABI default. */
515 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
517 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
518 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
519 else
520 target_flags |= MASK_AIX_STRUCT_RET;
523 /* Register global variables with the garbage collector. */
524 rs6000_add_gc_roots ();
526 /* Allocate an alias set for register saves & restores from stack. */
527 rs6000_sr_alias_set = new_alias_set ();
529 if (TARGET_TOC)
530 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
532 /* Arrange to save and restore machine status around nested functions. */
533 init_machine_status = rs6000_init_machine_status;
534 free_machine_status = rs6000_free_machine_status;
537 /* Handle -mabi= options. */
538 static void
539 rs6000_parse_abi_options ()
541 if (rs6000_abi_string == 0)
542 return;
543 else if (! strcmp (rs6000_abi_string, "altivec"))
544 rs6000_altivec_abi = 1;
545 else
546 error ("unknown ABI specified: '%s'", rs6000_abi_string);
549 void
550 optimization_options (level, size)
551 int level ATTRIBUTE_UNUSED;
552 int size ATTRIBUTE_UNUSED;
556 /* Do anything needed at the start of the asm file. */
558 void
559 rs6000_file_start (file, default_cpu)
560 FILE *file;
561 const char *default_cpu;
563 size_t i;
564 char buffer[80];
565 const char *start = buffer;
566 struct rs6000_cpu_select *ptr;
568 if (flag_verbose_asm)
570 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
571 rs6000_select[0].string = default_cpu;
573 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
575 ptr = &rs6000_select[i];
576 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
578 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
579 start = "";
583 #ifdef USING_ELFOS_H
584 switch (rs6000_sdata)
586 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
587 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
588 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
589 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
592 if (rs6000_sdata && g_switch_value)
594 fprintf (file, "%s -G %d", start, g_switch_value);
595 start = "";
597 #endif
599 if (*start == '\0')
600 putc ('\n', file);
605 /* Create a CONST_DOUBLE from a string. */
607 struct rtx_def *
608 rs6000_float_const (string, mode)
609 const char *string;
610 enum machine_mode mode;
612 REAL_VALUE_TYPE value;
613 value = REAL_VALUE_ATOF (string, mode);
614 return immed_real_const_1 (value, mode);
617 /* Return non-zero if this function is known to have a null epilogue. */
620 direct_return ()
622 if (reload_completed)
624 rs6000_stack_t *info = rs6000_stack_info ();
626 if (info->first_gp_reg_save == 32
627 && info->first_fp_reg_save == 64
628 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
629 && ! info->lr_save_p
630 && ! info->cr_save_p
631 && info->vrsave_mask == 0
632 && ! info->push_p)
633 return 1;
636 return 0;
639 /* Returns 1 always. */
642 any_operand (op, mode)
643 rtx op ATTRIBUTE_UNUSED;
644 enum machine_mode mode ATTRIBUTE_UNUSED;
646 return 1;
649 /* Returns 1 if op is the count register. */
651 count_register_operand (op, mode)
652 rtx op;
653 enum machine_mode mode ATTRIBUTE_UNUSED;
655 if (GET_CODE (op) != REG)
656 return 0;
658 if (REGNO (op) == COUNT_REGISTER_REGNUM)
659 return 1;
661 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
662 return 1;
664 return 0;
668 xer_operand (op, mode)
669 rtx op;
670 enum machine_mode mode ATTRIBUTE_UNUSED;
672 if (GET_CODE (op) != REG)
673 return 0;
675 if (XER_REGNO_P (REGNO (op)))
676 return 1;
678 return 0;
681 /* Return 1 if OP is a constant that can fit in a D field. */
684 short_cint_operand (op, mode)
685 rtx op;
686 enum machine_mode mode ATTRIBUTE_UNUSED;
688 return (GET_CODE (op) == CONST_INT
689 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
692 /* Similar for an unsigned D field. */
695 u_short_cint_operand (op, mode)
696 rtx op;
697 enum machine_mode mode ATTRIBUTE_UNUSED;
699 return (GET_CODE (op) == CONST_INT
700 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
703 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
706 non_short_cint_operand (op, mode)
707 rtx op;
708 enum machine_mode mode ATTRIBUTE_UNUSED;
710 return (GET_CODE (op) == CONST_INT
711 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
714 /* Returns 1 if OP is a CONST_INT that is a positive value
715 and an exact power of 2. */
718 exact_log2_cint_operand (op, mode)
719 rtx op;
720 enum machine_mode mode ATTRIBUTE_UNUSED;
722 return (GET_CODE (op) == CONST_INT
723 && INTVAL (op) > 0
724 && exact_log2 (INTVAL (op)) >= 0);
727 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
728 ctr, or lr). */
731 gpc_reg_operand (op, mode)
732 rtx op;
733 enum machine_mode mode;
735 return (register_operand (op, mode)
736 && (GET_CODE (op) != REG
737 || (REGNO (op) >= ARG_POINTER_REGNUM
738 && !XER_REGNO_P (REGNO (op)))
739 || REGNO (op) < MQ_REGNO));
742 /* Returns 1 if OP is either a pseudo-register or a register denoting a
743 CR field. */
746 cc_reg_operand (op, mode)
747 rtx op;
748 enum machine_mode mode;
750 return (register_operand (op, mode)
751 && (GET_CODE (op) != REG
752 || REGNO (op) >= FIRST_PSEUDO_REGISTER
753 || CR_REGNO_P (REGNO (op))));
756 /* Returns 1 if OP is either a pseudo-register or a register denoting a
757 CR field that isn't CR0. */
760 cc_reg_not_cr0_operand (op, mode)
761 rtx op;
762 enum machine_mode mode;
764 return (register_operand (op, mode)
765 && (GET_CODE (op) != REG
766 || REGNO (op) >= FIRST_PSEUDO_REGISTER
767 || CR_REGNO_NOT_CR0_P (REGNO (op))));
770 /* Returns 1 if OP is either a constant integer valid for a D-field or
771 a non-special register. If a register, it must be in the proper
772 mode unless MODE is VOIDmode. */
775 reg_or_short_operand (op, mode)
776 rtx op;
777 enum machine_mode mode;
779 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
782 /* Similar, except check if the negation of the constant would be
783 valid for a D-field. */
786 reg_or_neg_short_operand (op, mode)
787 rtx op;
788 enum machine_mode mode;
790 if (GET_CODE (op) == CONST_INT)
791 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
793 return gpc_reg_operand (op, mode);
796 /* Return 1 if the operand is either a register or an integer whose
797 high-order 16 bits are zero. */
800 reg_or_u_short_operand (op, mode)
801 rtx op;
802 enum machine_mode mode;
804 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
807 /* Return 1 is the operand is either a non-special register or ANY
808 constant integer. */
811 reg_or_cint_operand (op, mode)
812 rtx op;
813 enum machine_mode mode;
815 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
818 /* Return 1 is the operand is either a non-special register or ANY
819 32-bit signed constant integer. */
822 reg_or_arith_cint_operand (op, mode)
823 rtx op;
824 enum machine_mode mode;
826 return (gpc_reg_operand (op, mode)
827 || (GET_CODE (op) == CONST_INT
828 #if HOST_BITS_PER_WIDE_INT != 32
829 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
830 < (unsigned HOST_WIDE_INT) 0x100000000ll)
831 #endif
835 /* Return 1 is the operand is either a non-special register or a 32-bit
836 signed constant integer valid for 64-bit addition. */
839 reg_or_add_cint64_operand (op, mode)
840 rtx op;
841 enum machine_mode mode;
843 return (gpc_reg_operand (op, mode)
844 || (GET_CODE (op) == CONST_INT
845 && INTVAL (op) < 0x7fff8000
846 #if HOST_BITS_PER_WIDE_INT != 32
847 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
848 < 0x100000000ll)
849 #endif
853 /* Return 1 is the operand is either a non-special register or a 32-bit
854 signed constant integer valid for 64-bit subtraction. */
857 reg_or_sub_cint64_operand (op, mode)
858 rtx op;
859 enum machine_mode mode;
861 return (gpc_reg_operand (op, mode)
862 || (GET_CODE (op) == CONST_INT
863 && (- INTVAL (op)) < 0x7fff8000
864 #if HOST_BITS_PER_WIDE_INT != 32
865 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
866 < 0x100000000ll)
867 #endif
871 /* Return 1 is the operand is either a non-special register or ANY
872 32-bit unsigned constant integer. */
875 reg_or_logical_cint_operand (op, mode)
876 rtx op;
877 enum machine_mode mode;
879 if (GET_CODE (op) == CONST_INT)
881 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
883 if (GET_MODE_BITSIZE (mode) <= 32)
884 abort ();
886 if (INTVAL (op) < 0)
887 return 0;
890 return ((INTVAL (op) & GET_MODE_MASK (mode)
891 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
893 else if (GET_CODE (op) == CONST_DOUBLE)
895 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
896 || mode != DImode)
897 abort ();
899 return CONST_DOUBLE_HIGH (op) == 0;
901 else
902 return gpc_reg_operand (op, mode);
905 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
908 got_operand (op, mode)
909 rtx op;
910 enum machine_mode mode ATTRIBUTE_UNUSED;
912 return (GET_CODE (op) == SYMBOL_REF
913 || GET_CODE (op) == CONST
914 || GET_CODE (op) == LABEL_REF);
917 /* Return 1 if the operand is a simple references that can be loaded via
918 the GOT (labels involving addition aren't allowed). */
921 got_no_const_operand (op, mode)
922 rtx op;
923 enum machine_mode mode ATTRIBUTE_UNUSED;
925 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
928 /* Return the number of instructions it takes to form a constant in an
929 integer register. */
931 static int
932 num_insns_constant_wide (value)
933 HOST_WIDE_INT value;
935 /* signed constant loadable with {cal|addi} */
936 if (CONST_OK_FOR_LETTER_P (value, 'I'))
937 return 1;
939 /* constant loadable with {cau|addis} */
940 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
941 return 1;
943 #if HOST_BITS_PER_WIDE_INT == 64
944 else if (TARGET_POWERPC64)
946 HOST_WIDE_INT low = value & 0xffffffff;
947 HOST_WIDE_INT high = value >> 32;
949 low = (low ^ 0x80000000) - 0x80000000; /* sign extend */
951 if (high == 0 && (low & 0x80000000) == 0)
952 return 2;
954 else if (high == -1 && (low & 0x80000000) != 0)
955 return 2;
957 else if (! low)
958 return num_insns_constant_wide (high) + 1;
960 else
961 return (num_insns_constant_wide (high)
962 + num_insns_constant_wide (low) + 1);
964 #endif
966 else
967 return 2;
971 num_insns_constant (op, mode)
972 rtx op;
973 enum machine_mode mode;
975 if (GET_CODE (op) == CONST_INT)
977 #if HOST_BITS_PER_WIDE_INT == 64
978 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
979 && mask64_operand (op, mode))
980 return 2;
981 else
982 #endif
983 return num_insns_constant_wide (INTVAL (op));
986 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
988 long l;
989 REAL_VALUE_TYPE rv;
991 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
992 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
993 return num_insns_constant_wide ((HOST_WIDE_INT)l);
996 else if (GET_CODE (op) == CONST_DOUBLE)
998 HOST_WIDE_INT low;
999 HOST_WIDE_INT high;
1000 long l[2];
1001 REAL_VALUE_TYPE rv;
1002 int endian = (WORDS_BIG_ENDIAN == 0);
1004 if (mode == VOIDmode || mode == DImode)
1006 high = CONST_DOUBLE_HIGH (op);
1007 low = CONST_DOUBLE_LOW (op);
1009 else
1011 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1012 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1013 high = l[endian];
1014 low = l[1 - endian];
1017 if (TARGET_32BIT)
1018 return (num_insns_constant_wide (low)
1019 + num_insns_constant_wide (high));
1021 else
1023 if (high == 0 && (low & 0x80000000) == 0)
1024 return num_insns_constant_wide (low);
1026 else if (high == -1 && (low & 0x80000000) != 0)
1027 return num_insns_constant_wide (low);
1029 else if (mask64_operand (op, mode))
1030 return 2;
1032 else if (low == 0)
1033 return num_insns_constant_wide (high) + 1;
1035 else
1036 return (num_insns_constant_wide (high)
1037 + num_insns_constant_wide (low) + 1);
1041 else
1042 abort ();
1045 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1046 register with one instruction per word. We only do this if we can
1047 safely read CONST_DOUBLE_{LOW,HIGH}. */
1050 easy_fp_constant (op, mode)
1051 rtx op;
1052 enum machine_mode mode;
1054 if (GET_CODE (op) != CONST_DOUBLE
1055 || GET_MODE (op) != mode
1056 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1057 return 0;
1059 /* Consider all constants with -msoft-float to be easy. */
1060 if (TARGET_SOFT_FLOAT && mode != DImode)
1061 return 1;
1063 /* If we are using V.4 style PIC, consider all constants to be hard. */
1064 if (flag_pic && DEFAULT_ABI == ABI_V4)
1065 return 0;
1067 #ifdef TARGET_RELOCATABLE
1068 /* Similarly if we are using -mrelocatable, consider all constants
1069 to be hard. */
1070 if (TARGET_RELOCATABLE)
1071 return 0;
1072 #endif
1074 if (mode == DFmode)
1076 long k[2];
1077 REAL_VALUE_TYPE rv;
1079 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1080 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1082 return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1083 && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1086 else if (mode == SFmode)
1088 long l;
1089 REAL_VALUE_TYPE rv;
1091 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1092 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1094 return num_insns_constant_wide (l) == 1;
1097 else if (mode == DImode)
1098 return ((TARGET_POWERPC64
1099 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1100 || (num_insns_constant (op, DImode) <= 2));
1102 else if (mode == SImode)
1103 return 1;
1104 else
1105 abort ();
1108 /* Return 1 if the operand is 0.0. */
1110 zero_fp_constant (op, mode)
1111 rtx op;
1112 enum machine_mode mode;
1114 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1117 /* Return 1 if the operand is in volatile memory. Note that during
1118 the RTL generation phase, memory_operand does not return TRUE for
1119 volatile memory references. So this function allows us to
1120 recognize volatile references where its safe. */
1123 volatile_mem_operand (op, mode)
1124 rtx op;
1125 enum machine_mode mode;
1127 if (GET_CODE (op) != MEM)
1128 return 0;
1130 if (!MEM_VOLATILE_P (op))
1131 return 0;
1133 if (mode != GET_MODE (op))
1134 return 0;
1136 if (reload_completed)
1137 return memory_operand (op, mode);
1139 if (reload_in_progress)
1140 return strict_memory_address_p (mode, XEXP (op, 0));
1142 return memory_address_p (mode, XEXP (op, 0));
1145 /* Return 1 if the operand is an offsettable memory operand. */
1148 offsettable_mem_operand (op, mode)
1149 rtx op;
1150 enum machine_mode mode;
1152 return ((GET_CODE (op) == MEM)
1153 && offsettable_address_p (reload_completed || reload_in_progress,
1154 mode, XEXP (op, 0)));
1157 /* Return 1 if the operand is either an easy FP constant (see above) or
1158 memory. */
1161 mem_or_easy_const_operand (op, mode)
1162 rtx op;
1163 enum machine_mode mode;
1165 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1168 /* Return 1 if the operand is either a non-special register or an item
1169 that can be used as the operand of a `mode' add insn. */
1172 add_operand (op, mode)
1173 rtx op;
1174 enum machine_mode mode;
1176 if (GET_CODE (op) == CONST_INT)
1177 return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1178 || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1180 return gpc_reg_operand (op, mode);
1183 /* Return 1 if OP is a constant but not a valid add_operand. */
1186 non_add_cint_operand (op, mode)
1187 rtx op;
1188 enum machine_mode mode ATTRIBUTE_UNUSED;
1190 return (GET_CODE (op) == CONST_INT
1191 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1192 && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1195 /* Return 1 if the operand is a non-special register or a constant that
1196 can be used as the operand of an OR or XOR insn on the RS/6000. */
1199 logical_operand (op, mode)
1200 rtx op;
1201 enum machine_mode mode;
1203 HOST_WIDE_INT opl, oph;
1205 if (gpc_reg_operand (op, mode))
1206 return 1;
1208 if (GET_CODE (op) == CONST_INT)
1210 opl = INTVAL (op) & GET_MODE_MASK (mode);
1212 #if HOST_BITS_PER_WIDE_INT <= 32
1213 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1214 return 0;
1215 #endif
1217 else if (GET_CODE (op) == CONST_DOUBLE)
1219 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1220 abort ();
1222 opl = CONST_DOUBLE_LOW (op);
1223 oph = CONST_DOUBLE_HIGH (op);
1224 if (oph != 0)
1225 return 0;
1227 else
1228 return 0;
1230 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1231 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1234 /* Return 1 if C is a constant that is not a logical operand (as
1235 above), but could be split into one. */
1238 non_logical_cint_operand (op, mode)
1239 rtx op;
1240 enum machine_mode mode;
1242 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1243 && ! logical_operand (op, mode)
1244 && reg_or_logical_cint_operand (op, mode));
1247 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1248 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1249 Reject all ones and all zeros, since these should have been optimized
1250 away and confuse the making of MB and ME. */
1253 mask_operand (op, mode)
1254 rtx op;
1255 enum machine_mode mode ATTRIBUTE_UNUSED;
1257 HOST_WIDE_INT c, lsb;
1259 if (GET_CODE (op) != CONST_INT)
1260 return 0;
1262 c = INTVAL (op);
1264 /* We don't change the number of transitions by inverting,
1265 so make sure we start with the LS bit zero. */
1266 if (c & 1)
1267 c = ~c;
1269 /* Reject all zeros or all ones. */
1270 if (c == 0)
1271 return 0;
1273 /* Find the first transition. */
1274 lsb = c & -c;
1276 /* Invert to look for a second transition. */
1277 c = ~c;
1279 /* Erase first transition. */
1280 c &= -lsb;
1282 /* Find the second transition (if any). */
1283 lsb = c & -c;
1285 /* Match if all the bits above are 1's (or c is zero). */
1286 return c == -lsb;
1289 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1290 It is if there are no more than one 1->0 or 0->1 transitions.
1291 Reject all ones and all zeros, since these should have been optimized
1292 away and confuse the making of MB and ME. */
1295 mask64_operand (op, mode)
1296 rtx op;
1297 enum machine_mode mode;
1299 if (GET_CODE (op) == CONST_INT)
1301 HOST_WIDE_INT c, lsb;
1303 /* We don't change the number of transitions by inverting,
1304 so make sure we start with the LS bit zero. */
1305 c = INTVAL (op);
1306 if (c & 1)
1307 c = ~c;
1309 /* Reject all zeros or all ones. */
1310 if (c == 0)
1311 return 0;
1313 /* Find the transition, and check that all bits above are 1's. */
1314 lsb = c & -c;
1315 return c == -lsb;
1317 else if (GET_CODE (op) == CONST_DOUBLE
1318 && (mode == VOIDmode || mode == DImode))
1320 HOST_WIDE_INT low, high, lsb;
1322 if (HOST_BITS_PER_WIDE_INT < 64)
1323 high = CONST_DOUBLE_HIGH (op);
1325 low = CONST_DOUBLE_LOW (op);
1326 if (low & 1)
1328 if (HOST_BITS_PER_WIDE_INT < 64)
1329 high = ~high;
1330 low = ~low;
1333 if (low == 0)
1335 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1336 return 0;
1338 lsb = high & -high;
1339 return high == -lsb;
1342 lsb = low & -low;
1343 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1345 else
1346 return 0;
1349 /* Return 1 if the operand is either a non-special register or a constant
1350 that can be used as the operand of a PowerPC64 logical AND insn. */
1353 and64_operand (op, mode)
1354 rtx op;
1355 enum machine_mode mode;
1357 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1358 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1360 return (logical_operand (op, mode) || mask64_operand (op, mode));
1363 /* Return 1 if the operand is either a non-special register or a
1364 constant that can be used as the operand of an RS/6000 logical AND insn. */
1367 and_operand (op, mode)
1368 rtx op;
1369 enum machine_mode mode;
1371 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1372 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1374 return (logical_operand (op, mode) || mask_operand (op, mode));
1377 /* Return 1 if the operand is a general register or memory operand. */
1380 reg_or_mem_operand (op, mode)
1381 rtx op;
1382 enum machine_mode mode;
1384 return (gpc_reg_operand (op, mode)
1385 || memory_operand (op, mode)
1386 || volatile_mem_operand (op, mode));
1389 /* Return 1 if the operand is a general register or memory operand without
1390 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1391 instruction. */
1394 lwa_operand (op, mode)
1395 rtx op;
1396 enum machine_mode mode;
1398 rtx inner = op;
1400 if (reload_completed && GET_CODE (inner) == SUBREG)
1401 inner = SUBREG_REG (inner);
1403 return gpc_reg_operand (inner, mode)
1404 || (memory_operand (inner, mode)
1405 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1406 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1407 && (GET_CODE (XEXP (inner, 0)) != PLUS
1408 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1409 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1412 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1413 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1414 forced to lr. */
1417 call_operand (op, mode)
1418 rtx op;
1419 enum machine_mode mode;
1421 if (mode != VOIDmode && GET_MODE (op) != mode)
1422 return 0;
1424 return (GET_CODE (op) == SYMBOL_REF
1425 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1428 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1429 this file and the function is not weakly defined. */
1432 current_file_function_operand (op, mode)
1433 rtx op;
1434 enum machine_mode mode ATTRIBUTE_UNUSED;
1436 return (GET_CODE (op) == SYMBOL_REF
1437 && (SYMBOL_REF_FLAG (op)
1438 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1439 && ! DECL_WEAK (current_function_decl))));
1442 /* Return 1 if this operand is a valid input for a move insn. */
1445 input_operand (op, mode)
1446 rtx op;
1447 enum machine_mode mode;
1449 /* Memory is always valid. */
1450 if (memory_operand (op, mode))
1451 return 1;
1453 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1454 if (GET_CODE (op) == CONSTANT_P_RTX)
1455 return 1;
1457 /* For floating-point, easy constants are valid. */
1458 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1459 && CONSTANT_P (op)
1460 && easy_fp_constant (op, mode))
1461 return 1;
1463 /* Allow any integer constant. */
1464 if (GET_MODE_CLASS (mode) == MODE_INT
1465 && (GET_CODE (op) == CONST_INT
1466 || GET_CODE (op) == CONST_DOUBLE))
1467 return 1;
1469 /* For floating-point or multi-word mode, the only remaining valid type
1470 is a register. */
1471 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1472 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1473 return register_operand (op, mode);
1475 /* The only cases left are integral modes one word or smaller (we
1476 do not get called for MODE_CC values). These can be in any
1477 register. */
1478 if (register_operand (op, mode))
1479 return 1;
1481 /* A SYMBOL_REF referring to the TOC is valid. */
1482 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1483 return 1;
1485 /* A constant pool expression (relative to the TOC) is valid */
1486 if (TOC_RELATIVE_EXPR_P (op))
1487 return 1;
1489 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1490 to be valid. */
1491 if (DEFAULT_ABI == ABI_V4
1492 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1493 && small_data_operand (op, Pmode))
1494 return 1;
1496 return 0;
1499 /* Return 1 for an operand in small memory on V.4/eabi. */
1502 small_data_operand (op, mode)
1503 rtx op ATTRIBUTE_UNUSED;
1504 enum machine_mode mode ATTRIBUTE_UNUSED;
1506 #if TARGET_ELF
1507 rtx sym_ref;
1509 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1510 return 0;
1512 if (DEFAULT_ABI != ABI_V4)
1513 return 0;
1515 if (GET_CODE (op) == SYMBOL_REF)
1516 sym_ref = op;
1518 else if (GET_CODE (op) != CONST
1519 || GET_CODE (XEXP (op, 0)) != PLUS
1520 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1521 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1522 return 0;
1524 else
1526 rtx sum = XEXP (op, 0);
1527 HOST_WIDE_INT summand;
1529 /* We have to be careful here, because it is the referenced address
1530 that must be 32k from _SDA_BASE_, not just the symbol. */
1531 summand = INTVAL (XEXP (sum, 1));
1532 if (summand < 0 || summand > g_switch_value)
1533 return 0;
1535 sym_ref = XEXP (sum, 0);
1538 if (*XSTR (sym_ref, 0) != '@')
1539 return 0;
1541 return 1;
1543 #else
1544 return 0;
1545 #endif
1548 static int
1549 constant_pool_expr_1 (op, have_sym, have_toc)
1550 rtx op;
1551 int *have_sym;
1552 int *have_toc;
1554 switch (GET_CODE(op))
1556 case SYMBOL_REF:
1557 if (CONSTANT_POOL_ADDRESS_P (op))
1559 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1561 *have_sym = 1;
1562 return 1;
1564 else
1565 return 0;
1567 else if (! strcmp (XSTR (op, 0), toc_label_name))
1569 *have_toc = 1;
1570 return 1;
1572 else
1573 return 0;
1574 case PLUS:
1575 case MINUS:
1576 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1577 constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1578 case CONST:
1579 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1580 case CONST_INT:
1581 return 1;
1582 default:
1583 return 0;
1588 constant_pool_expr_p (op)
1589 rtx op;
1591 int have_sym = 0;
1592 int have_toc = 0;
1593 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1597 toc_relative_expr_p (op)
1598 rtx op;
1600 int have_sym = 0;
1601 int have_toc = 0;
1602 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1605 /* Try machine-dependent ways of modifying an illegitimate address
1606 to be legitimate. If we find one, return the new, valid address.
1607 This is used from only one place: `memory_address' in explow.c.
1609 OLDX is the address as it was before break_out_memory_refs was
1610 called. In some cases it is useful to look at this to decide what
1611 needs to be done.
1613 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1615 It is always safe for this function to do nothing. It exists to
1616 recognize opportunities to optimize the output.
1618 On RS/6000, first check for the sum of a register with a constant
1619 integer that is out of range. If so, generate code to add the
1620 constant with the low-order 16 bits masked to the register and force
1621 this result into another register (this can be done with `cau').
1622 Then generate an address of REG+(CONST&0xffff), allowing for the
1623 possibility of bit 16 being a one.
1625 Then check for the sum of a register and something not constant, try to
1626 load the other things into a register and return the sum. */
1628 rs6000_legitimize_address (x, oldx, mode)
1629 rtx x;
1630 rtx oldx ATTRIBUTE_UNUSED;
1631 enum machine_mode mode;
1633 if (GET_CODE (x) == PLUS
1634 && GET_CODE (XEXP (x, 0)) == REG
1635 && GET_CODE (XEXP (x, 1)) == CONST_INT
1636 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1638 HOST_WIDE_INT high_int, low_int;
1639 rtx sum;
1640 high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
1641 low_int = INTVAL (XEXP (x, 1)) & 0xffff;
1642 if (low_int & 0x8000)
1643 high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
1644 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1645 GEN_INT (high_int)), 0);
1646 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1648 else if (GET_CODE (x) == PLUS
1649 && GET_CODE (XEXP (x, 0)) == REG
1650 && GET_CODE (XEXP (x, 1)) != CONST_INT
1651 && GET_MODE_NUNITS (mode) == 1
1652 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1653 && (TARGET_POWERPC64 || mode != DImode)
1654 && mode != TImode)
1656 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1657 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1659 else if (ALTIVEC_VECTOR_MODE (mode))
1661 rtx reg;
1663 /* Make sure both operands are registers. */
1664 if (GET_CODE (x) == PLUS)
1665 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1666 force_reg (Pmode, XEXP (x, 1)));
1668 reg = force_reg (Pmode, x);
1669 return reg;
1671 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1672 && GET_CODE (x) != CONST_INT
1673 && GET_CODE (x) != CONST_DOUBLE
1674 && CONSTANT_P (x)
1675 && GET_MODE_NUNITS (mode) == 1
1676 && (GET_MODE_BITSIZE (mode) <= 32
1677 || (TARGET_HARD_FLOAT && mode != DFmode)))
1679 rtx reg = gen_reg_rtx (Pmode);
1680 emit_insn (gen_elf_high (reg, (x)));
1681 return gen_rtx_LO_SUM (Pmode, reg, (x));
1683 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1684 && ! flag_pic
1685 && GET_CODE (x) != CONST_INT
1686 && GET_CODE (x) != CONST_DOUBLE
1687 && CONSTANT_P (x)
1688 && (TARGET_HARD_FLOAT || mode != DFmode)
1689 && mode != DImode
1690 && mode != TImode)
1692 rtx reg = gen_reg_rtx (Pmode);
1693 emit_insn (gen_macho_high (reg, (x)));
1694 return gen_rtx_LO_SUM (Pmode, reg, (x));
1696 else if (TARGET_TOC
1697 && CONSTANT_POOL_EXPR_P (x)
1698 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1700 return create_TOC_reference (x);
1702 else
1703 return NULL_RTX;
1706 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1707 that is a valid memory address for an instruction.
1708 The MODE argument is the machine mode for the MEM expression
1709 that wants to use this address.
1711 On the RS/6000, there are four valid address: a SYMBOL_REF that
1712 refers to a constant pool entry of an address (or the sum of it
1713 plus a constant), a short (16-bit signed) constant plus a register,
1714 the sum of two registers, or a register indirect, possibly with an
1715 auto-increment. For DFmode and DImode with an constant plus register,
1716 we must ensure that both words are addressable or PowerPC64 with offset
1717 word aligned.
1719 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1720 32-bit DImode, TImode), indexed addressing cannot be used because
1721 adjacent memory cells are accessed by adding word-sized offsets
1722 during assembly output. */
1724 rs6000_legitimate_address (mode, x, reg_ok_strict)
1725 enum machine_mode mode;
1726 rtx x;
1727 int reg_ok_strict;
1729 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
1730 return 1;
1731 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
1732 && TARGET_UPDATE
1733 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
1734 return 1;
1735 if (LEGITIMATE_SMALL_DATA_P (mode, x))
1736 return 1;
1737 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
1738 return 1;
1739 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
1740 if (! reg_ok_strict
1741 && GET_CODE (x) == PLUS
1742 && GET_CODE (XEXP (x, 0)) == REG
1743 && XEXP (x, 0) == virtual_stack_vars_rtx
1744 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1745 return 1;
1746 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
1747 return 1;
1748 if (mode != TImode
1749 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1750 && (TARGET_POWERPC64 || mode != DImode)
1751 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
1752 return 1;
1753 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
1754 return 1;
1755 return 0;
1758 /* Try to output insns to set TARGET equal to the constant C if it can
1759 be done in less than N insns. Do all computations in MODE.
1760 Returns the place where the output has been placed if it can be
1761 done and the insns have been emitted. If it would take more than N
1762 insns, zero is returned and no insns and emitted. */
1765 rs6000_emit_set_const (dest, mode, source, n)
1766 rtx dest, source;
1767 enum machine_mode mode;
1768 int n ATTRIBUTE_UNUSED;
1770 HOST_WIDE_INT c0, c1;
1772 if (mode == QImode || mode == HImode || mode == SImode)
1774 if (dest == NULL)
1775 dest = gen_reg_rtx (mode);
1776 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
1777 return dest;
1780 if (GET_CODE (source) == CONST_INT)
1782 c0 = INTVAL (source);
1783 c1 = -(c0 < 0);
1785 else if (GET_CODE (source) == CONST_DOUBLE)
1787 #if HOST_BITS_PER_WIDE_INT >= 64
1788 c0 = CONST_DOUBLE_LOW (source);
1789 c1 = -(c0 < 0);
1790 #else
1791 c0 = CONST_DOUBLE_LOW (source);
1792 c1 = CONST_DOUBLE_HIGH (source);
1793 #endif
1795 else
1796 abort ();
1798 return rs6000_emit_set_long_const (dest, c0, c1);
1801 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
1802 fall back to a straight forward decomposition. We do this to avoid
1803 exponential run times encountered when looking for longer sequences
1804 with rs6000_emit_set_const. */
1805 static rtx
1806 rs6000_emit_set_long_const (dest, c1, c2)
1807 rtx dest;
1808 HOST_WIDE_INT c1, c2;
1810 if (!TARGET_POWERPC64)
1812 rtx operand1, operand2;
1814 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
1815 DImode);
1816 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
1817 DImode);
1818 emit_move_insn (operand1, GEN_INT (c1));
1819 emit_move_insn (operand2, GEN_INT (c2));
1821 else
1823 HOST_WIDE_INT d1, d2, d3, d4;
1825 /* Decompose the entire word */
1826 #if HOST_BITS_PER_WIDE_INT >= 64
1827 if (c2 != -(c1 < 0))
1828 abort ();
1829 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1830 c1 -= d1;
1831 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1832 c1 = (c1 - d2) >> 32;
1833 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1834 c1 -= d3;
1835 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1836 if (c1 != d4)
1837 abort ();
1838 #else
1839 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1840 c1 -= d1;
1841 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1842 if (c1 != d2)
1843 abort ();
1844 c2 += (d2 < 0);
1845 d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
1846 c2 -= d3;
1847 d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1848 if (c2 != d4)
1849 abort ();
1850 #endif
1852 /* Construct the high word */
1853 if (d4 != 0)
1855 emit_move_insn (dest, GEN_INT (d4));
1856 if (d3 != 0)
1857 emit_move_insn (dest,
1858 gen_rtx_PLUS (DImode, dest, GEN_INT (d3)));
1860 else
1861 emit_move_insn (dest, GEN_INT (d3));
1863 /* Shift it into place */
1864 if (d3 != 0 || d4 != 0)
1865 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
1867 /* Add in the low bits. */
1868 if (d2 != 0)
1869 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest, GEN_INT (d2)));
1870 if (d1 != 0)
1871 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest, GEN_INT (d1)));
1874 return dest;
1877 /* Emit a move from SOURCE to DEST in mode MODE. */
1878 void
1879 rs6000_emit_move (dest, source, mode)
1880 rtx dest;
1881 rtx source;
1882 enum machine_mode mode;
1884 rtx operands[2];
1885 operands[0] = dest;
1886 operands[1] = source;
1888 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
1889 if (GET_CODE (operands[1]) == CONST_DOUBLE
1890 && ! FLOAT_MODE_P (mode)
1891 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1893 /* FIXME. This should never happen. */
1894 /* Since it seems that it does, do the safe thing and convert
1895 to a CONST_INT. */
1896 operands[1] =
1897 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
1899 if (GET_CODE (operands[1]) == CONST_DOUBLE
1900 && ! FLOAT_MODE_P (mode)
1901 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
1902 && CONST_DOUBLE_LOW (operands[1]) >= 0)
1903 || (CONST_DOUBLE_HIGH (operands[1]) == -1
1904 && CONST_DOUBLE_LOW (operands[1]) < 0)))
1905 abort ();
1907 /* Check if GCC is setting up a block move that will end up using FP
1908 registers as temporaries. We must make sure this is acceptable. */
1909 if (GET_CODE (operands[0]) == MEM
1910 && GET_CODE (operands[1]) == MEM
1911 && mode == DImode
1912 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
1913 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
1914 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
1915 ? 32 : MEM_ALIGN (operands[0])))
1916 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
1917 ? 32
1918 : MEM_ALIGN (operands[1]))))
1919 && ! MEM_VOLATILE_P (operands [0])
1920 && ! MEM_VOLATILE_P (operands [1]))
1922 emit_move_insn (adjust_address (operands[0], SImode, 0),
1923 adjust_address (operands[1], SImode, 0));
1924 emit_move_insn (adjust_address (operands[0], SImode, 4),
1925 adjust_address (operands[1], SImode, 4));
1926 return;
1929 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
1930 operands[1] = force_reg (mode, operands[1]);
1932 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
1933 && GET_CODE (operands[0]) == MEM)
1935 int regnum;
1937 if (reload_in_progress || reload_completed)
1938 regnum = true_regnum (operands[1]);
1939 else if (GET_CODE (operands[1]) == REG)
1940 regnum = REGNO (operands[1]);
1941 else
1942 regnum = -1;
1944 /* If operands[1] is a register, on POWER it may have
1945 double-precision data in it, so truncate it to single
1946 precision. */
1947 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
1949 rtx newreg;
1950 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
1951 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
1952 operands[1] = newreg;
1956 /* Handle the case where reload calls us with an invalid address;
1957 and the case of CONSTANT_P_RTX. */
1958 if (! general_operand (operands[1], mode)
1959 || ! nonimmediate_operand (operands[0], mode)
1960 || GET_CODE (operands[1]) == CONSTANT_P_RTX)
1962 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1963 return;
1966 /* FIXME: In the long term, this switch statement should go away
1967 and be replaced by a sequence of tests based on things like
1968 mode == Pmode. */
1969 switch (mode)
1971 case HImode:
1972 case QImode:
1973 if (CONSTANT_P (operands[1])
1974 && GET_CODE (operands[1]) != CONST_INT)
1975 operands[1] = force_const_mem (mode, operands[1]);
1976 break;
1978 case TFmode:
1979 case DFmode:
1980 case SFmode:
1981 if (CONSTANT_P (operands[1])
1982 && ! easy_fp_constant (operands[1], mode))
1983 operands[1] = force_const_mem (mode, operands[1]);
1984 break;
1986 case V16QImode:
1987 case V8HImode:
1988 case V4SFmode:
1989 case V4SImode:
1990 /* fixme: aldyh -- allow vector constants when they are implemented. */
1991 if (CONSTANT_P (operands[1]))
1992 operands[1] = force_const_mem (mode, operands[1]);
1993 break;
1995 case SImode:
1996 case DImode:
1997 /* Use default pattern for address of ELF small data */
1998 if (TARGET_ELF
1999 && mode == Pmode
2000 && DEFAULT_ABI == ABI_V4
2001 && (GET_CODE (operands[1]) == SYMBOL_REF
2002 || GET_CODE (operands[1]) == CONST)
2003 && small_data_operand (operands[1], mode))
2005 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2006 return;
2009 if (DEFAULT_ABI == ABI_V4
2010 && mode == Pmode && mode == SImode
2011 && flag_pic == 1 && got_operand (operands[1], mode))
2013 emit_insn (gen_movsi_got (operands[0], operands[1]));
2014 return;
2017 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2018 && TARGET_NO_TOC && ! flag_pic
2019 && mode == Pmode
2020 && CONSTANT_P (operands[1])
2021 && GET_CODE (operands[1]) != HIGH
2022 && GET_CODE (operands[1]) != CONST_INT)
2024 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2026 /* If this is a function address on -mcall-aixdesc,
2027 convert it to the address of the descriptor. */
2028 if (DEFAULT_ABI == ABI_AIX
2029 && GET_CODE (operands[1]) == SYMBOL_REF
2030 && XSTR (operands[1], 0)[0] == '.')
2032 const char *name = XSTR (operands[1], 0);
2033 rtx new_ref;
2034 while (*name == '.')
2035 name++;
2036 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2037 CONSTANT_POOL_ADDRESS_P (new_ref)
2038 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2039 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2040 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2041 operands[1] = new_ref;
2044 if (DEFAULT_ABI == ABI_DARWIN)
2046 emit_insn (gen_macho_high (target, operands[1]));
2047 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2048 return;
2051 emit_insn (gen_elf_high (target, operands[1]));
2052 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2053 return;
2056 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2057 and we have put it in the TOC, we just need to make a TOC-relative
2058 reference to it. */
2059 if (TARGET_TOC
2060 && GET_CODE (operands[1]) == SYMBOL_REF
2061 && CONSTANT_POOL_EXPR_P (operands[1])
2062 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2063 get_pool_mode (operands[1])))
2065 operands[1] = create_TOC_reference (operands[1]);
2067 else if (mode == Pmode
2068 && CONSTANT_P (operands[1])
2069 && ((GET_CODE (operands[1]) != CONST_INT
2070 && ! easy_fp_constant (operands[1], mode))
2071 || (GET_CODE (operands[1]) == CONST_INT
2072 && num_insns_constant (operands[1], mode) > 2)
2073 || (GET_CODE (operands[0]) == REG
2074 && FP_REGNO_P (REGNO (operands[0]))))
2075 && GET_CODE (operands[1]) != HIGH
2076 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2077 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2079 /* Emit a USE operation so that the constant isn't deleted if
2080 expensive optimizations are turned on because nobody
2081 references it. This should only be done for operands that
2082 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2083 This should not be done for operands that contain LABEL_REFs.
2084 For now, we just handle the obvious case. */
2085 if (GET_CODE (operands[1]) != LABEL_REF)
2086 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2088 /* Darwin uses a special PIC legitimizer. */
2089 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2091 #if TARGET_MACHO
2092 rtx temp_reg = ((reload_in_progress || reload_completed)
2093 ? operands[0] : NULL);
2095 operands[1] =
2096 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2097 temp_reg);
2098 #endif
2099 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2100 return;
2103 /* If we are to limit the number of things we put in the TOC and
2104 this is a symbol plus a constant we can add in one insn,
2105 just put the symbol in the TOC and add the constant. Don't do
2106 this if reload is in progress. */
2107 if (GET_CODE (operands[1]) == CONST
2108 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2109 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2110 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2111 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2112 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2113 && ! side_effects_p (operands[0]))
2115 rtx sym =
2116 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2117 rtx other = XEXP (XEXP (operands[1], 0), 1);
2119 sym = force_reg (mode, sym);
2120 if (mode == SImode)
2121 emit_insn (gen_addsi3 (operands[0], sym, other));
2122 else
2123 emit_insn (gen_adddi3 (operands[0], sym, other));
2124 return;
2127 operands[1] = force_const_mem (mode, operands[1]);
2129 if (TARGET_TOC
2130 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2131 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2132 get_pool_constant (XEXP (operands[1], 0)),
2133 get_pool_mode (XEXP (operands[1], 0))))
2135 operands[1]
2136 = gen_rtx_MEM (mode,
2137 create_TOC_reference (XEXP (operands[1], 0)));
2138 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2139 RTX_UNCHANGING_P (operands[1]) = 1;
2142 break;
2144 case TImode:
2145 if (GET_CODE (operands[0]) == MEM
2146 && GET_CODE (XEXP (operands[0], 0)) != REG
2147 && ! reload_in_progress)
2148 operands[0]
2149 = replace_equiv_address (operands[0],
2150 copy_addr_to_reg (XEXP (operands[0], 0)));
2152 if (GET_CODE (operands[1]) == MEM
2153 && GET_CODE (XEXP (operands[1], 0)) != REG
2154 && ! reload_in_progress)
2155 operands[1]
2156 = replace_equiv_address (operands[1],
2157 copy_addr_to_reg (XEXP (operands[1], 0)));
2158 break;
2160 default:
2161 abort ();
2164 /* Above, we may have called force_const_mem which may have returned
2165 an invalid address. If we can, fix this up; otherwise, reload will
2166 have to deal with it. */
2167 if (GET_CODE (operands[1]) == MEM
2168 && ! memory_address_p (mode, XEXP (operands[1], 0))
2169 && ! reload_in_progress)
2170 operands[1] = adjust_address (operands[1], mode, 0);
2172 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2173 return;
2176 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2177    for a call to a function whose data type is FNTYPE.
2178    For a library call, FNTYPE is 0.
2180    For incoming args we set the number of arguments in the prototype large
2181    so we never return a PARALLEL.  */
2183 void
2184 init_cumulative_args (cum, fntype, libname, incoming)
2185      CUMULATIVE_ARGS *cum;
2186      tree fntype;
2187      rtx libname ATTRIBUTE_UNUSED;
2188      int incoming;
/* NOTE(review): this listing was extracted from a web view; blank and
   brace-only lines from the original file are not shown, so the function
   and block braces below are invisible.  */
/* Static, therefore guaranteed zero-initialized; copying it resets *CUM.  */
2190   static CUMULATIVE_ARGS zero_cumulative;
2192   *cum = zero_cumulative;
2193   cum->words = 0;
2194   cum->fregno = FP_ARG_MIN_REG;
2195   cum->vregno = ALTIVEC_ARG_MIN_REG;
2196   cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2197   cum->call_cookie = CALL_NORMAL;
2198   cum->sysv_gregno = GP_ARG_MIN_REG;
/* For incoming args, a huge nargs_prototype keeps the count non-negative
   for the whole argument list, so a PARALLEL is never built.  */
2200   if (incoming)
2201     cum->nargs_prototype = 1000;              /* don't return a PARALLEL */
2203   else if (cum->prototype)
2204     cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2205                             + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2206                                || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2208   else
2209     cum->nargs_prototype = 0;
2211   cum->orig_nargs = cum->nargs_prototype;
2213   /* Check for longcall's */
2214   if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2215     cum->call_cookie = CALL_LONG;
/* Optional debug trace of the state just initialized.  */
2217   if (TARGET_DEBUG_ARG)
2219       fprintf (stderr, "\ninit_cumulative_args:");
2220       if (fntype)
2222           tree ret_type = TREE_TYPE (fntype);
2223           fprintf (stderr, " ret code = %s,",
2224                    tree_code_name[ (int)TREE_CODE (ret_type) ]);
2227       if (cum->call_cookie & CALL_LONG)
2228         fprintf (stderr, " longcall,");
2230       fprintf (stderr, " proto = %d, nargs = %d\n",
2231                cum->prototype, cum->nargs_prototype);
2235 /* If defined, a C expression which determines whether, and in which
2236 direction, to pad out an argument with extra space. The value
2237 should be of type `enum direction': either `upward' to pad above
2238 the argument, `downward' to pad below, or `none' to inhibit
2239 padding.
2241 For the AIX ABI structs are always stored left shifted in their
2242 argument slot. */
2244 enum direction
2245 function_arg_padding (mode, type)
2246 enum machine_mode mode;
2247 tree type;
2249 if (type != 0 && AGGREGATE_TYPE_P (type))
2250 return upward;
2252 /* This is the default definition. */
2253 return (! BYTES_BIG_ENDIAN
2254 ? upward
2255 : ((mode == BLKmode
2256 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2257 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2258 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2259 ? downward : upward));
2262 /* If defined, a C expression that gives the alignment boundary, in bits,
2263 of an argument with the specified mode and type. If it is not defined,
2264 PARM_BOUNDARY is used for all arguments.
2266 V.4 wants long longs to be double word aligned. */
2269 function_arg_boundary (mode, type)
2270 enum machine_mode mode;
2271 tree type ATTRIBUTE_UNUSED;
2273 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2274 return 64;
2275 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2276 return 128;
2277 else
2278 return PARM_BOUNDARY;
2281 /* Update the data in CUM to advance over an argument
2282    of mode MODE and data type TYPE.
2283    (TYPE is null for libcalls where that information may not be available.)  */
2285 void
2286 function_arg_advance (cum, mode, type, named)
2287      CUMULATIVE_ARGS *cum;
2288      enum machine_mode mode;
2289      tree type;
2290      int named;
/* NOTE(review): brace-only lines were dropped by the extraction, so the
   function/block braces are not visible below.  */
2292   cum->nargs_prototype--;
/* Case 1: AltiVec vector argument under the AltiVec ABI -- consume a
   vector register if one is left and the arg is prototyped, otherwise
   fall back to stack words.  */
2294   if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2296       if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2297         cum->vregno++;
2298       else
2299         cum->words += RS6000_ARG_SIZE (mode, type);
/* Case 2: the V.4 (SVR4) ABI.  */
2301   else if (DEFAULT_ABI == ABI_V4)
2303       if (TARGET_HARD_FLOAT
2304           && (mode == SFmode || mode == DFmode))
2306           if (cum->fregno <= FP_ARG_V4_MAX_REG)
2307             cum->fregno++;
2308           else
/* FP arg spilled to the stack: DFmode is doubleword-aligned first.  */
2310               if (mode == DFmode)
2311                 cum->words += cum->words & 1;
2312               cum->words += RS6000_ARG_SIZE (mode, type);
2315       else
2317           int n_words;
2318           int gregno = cum->sysv_gregno;
2320           /* Aggregates and IEEE quad get passed by reference.  */
2321           if ((type && AGGREGATE_TYPE_P (type))
2322               || mode == TFmode)
2323             n_words = 1;
2324           else
2325             n_words = RS6000_ARG_SIZE (mode, type);
2327           /* Long long is put in odd registers.  */
2328           if (n_words == 2 && (gregno & 1) == 0)
2329             gregno += 1;
2331           /* Long long is not split between registers and stack.  */
2332           if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2334               /* Long long is aligned on the stack.  */
2335               if (n_words == 2)
2336                 cum->words += cum->words & 1;
2337               cum->words += n_words;
2340           /* Note: continuing to accumulate gregno past when we've started
2341              spilling to the stack indicates the fact that we've started
2342              spilling to the stack to expand_builtin_saveregs.  */
2343           cum->sysv_gregno = gregno + n_words;
2346       if (TARGET_DEBUG_ARG)
2348           fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2349                    cum->words, cum->fregno);
2350           fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2351                    cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2352           fprintf (stderr, "mode = %4s, named = %d\n",
2353                    GET_MODE_NAME (mode), named);
/* Case 3: the default (AIX-style) ABI -- everything is counted in
   stack words; FP args also consume an FP register.  */
2356   else
2358       int align = (TARGET_32BIT && (cum->words & 1) != 0
2359                    && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2361       cum->words += align + RS6000_ARG_SIZE (mode, type);
2363       if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2364         cum->fregno++;
2366       if (TARGET_DEBUG_ARG)
2368           fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2369                    cum->words, cum->fregno);
2370           fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2371                    cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2372           fprintf (stderr, "named = %d, align = %d\n", named, align);
2377 /* Determine where to put an argument to a function.
2378    Value is zero to push the argument on the stack,
2379    or a hard register in which to store the argument.
2381    MODE is the argument's machine mode.
2382    TYPE is the data type of the argument (as a tree).
2383    This is null for libcalls where that information may
2384    not be available.
2385    CUM is a variable of type CUMULATIVE_ARGS which gives info about
2386    the preceding args and about the function being called.
2387    NAMED is nonzero if this argument is a named parameter
2388    (otherwise it is an extra parameter matching an ellipsis).
2390    On RS/6000 the first eight words of non-FP are normally in registers
2391    and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
2392    Under V.4, the first 8 FP args are in registers.
2394    If this is floating-point and no prototype is specified, we use
2395    both an FP and integer register (or possibly FP reg and stack).  Library
2396    functions (when TYPE is zero) always have the proper types for args,
2397    so we can pass the FP value just in one register.  emit_library_function
2398    doesn't support PARALLEL anyway.  */
2400 struct rtx_def *
2401 function_arg (cum, mode, type, named)
2402      CUMULATIVE_ARGS *cum;
2403      enum machine_mode mode;
2404      tree type;
2405      int named;
/* NOTE(review): brace-only lines were dropped by the extraction, so the
   function/block braces are not visible below.  */
2407   enum rs6000_abi abi = DEFAULT_ABI;
2409   /* Return a marker to indicate whether CR1 needs to set or clear the
2410      bit that V.4 uses to say fp args were passed in registers.
2411      Assume that we don't need the marker for software floating point,
2412      or compiler generated library calls.  */
2413   if (mode == VOIDmode)
2415       if (abi == ABI_V4
2416           && TARGET_HARD_FLOAT
2417           && cum->nargs_prototype < 0
2418           && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2420           return GEN_INT (cum->call_cookie
2421                           | ((cum->fregno == FP_ARG_MIN_REG)
2422                              ? CALL_V4_SET_FP_ARGS
2423                              : CALL_V4_CLEAR_FP_ARGS));
2426       return GEN_INT (cum->call_cookie);
/* AltiVec vector argument: vector register when named and available,
   otherwise pushed on the stack (NULL).  */
2429   if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2431       if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2432         return gen_rtx_REG (mode, cum->vregno);
2433       else
2434         return NULL;
/* V.4 ABI: FP args use FP registers; everything else uses GP registers,
   mirroring the bookkeeping in function_arg_advance above.  */
2436   else if (abi == ABI_V4)
2438       if (TARGET_HARD_FLOAT
2439           && (mode == SFmode || mode == DFmode))
2441           if (cum->fregno <= FP_ARG_V4_MAX_REG)
2442             return gen_rtx_REG (mode, cum->fregno);
2443           else
2444             return NULL;
2446       else
2448           int n_words;
2449           int gregno = cum->sysv_gregno;
2451           /* Aggregates and IEEE quad get passed by reference.  */
2452           if ((type && AGGREGATE_TYPE_P (type))
2453               || mode == TFmode)
2454             n_words = 1;
2455           else
2456             n_words = RS6000_ARG_SIZE (mode, type);
2458           /* Long long is put in odd registers.  */
2459           if (n_words == 2 && (gregno & 1) == 0)
2460             gregno += 1;
2462           /* Long long is not split between registers and stack.  */
2463           if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2464             return gen_rtx_REG (mode, gregno);
2465           else
2466             return NULL;
/* Default (AIX-style) ABI.  */
2469   else
2471       int align = (TARGET_32BIT && (cum->words & 1) != 0
2472                    && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2473       int align_words = cum->words + align;
2475       if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2476         return NULL_RTX;
2478       if (USE_FP_FOR_ARG_P (*cum, mode, type))
2480           if (! type
2481               || ((cum->nargs_prototype > 0)
2482                   /* IBM AIX extended its linkage convention definition always
2483                      to require FP args after register save area hole on the
2484                      stack.  */
2485                   && (DEFAULT_ABI != ABI_AIX
2486                       || ! TARGET_XL_CALL
2487                       || (align_words < GP_ARG_NUM_REG))))
2488             return gen_rtx_REG (mode, cum->fregno);
/* Unprototyped FP arg: pass in both the GP slot (or stack) and the FP
   register via a two-element PARALLEL.  */
2490           return gen_rtx_PARALLEL (mode,
2491                                    gen_rtvec (2,
2492                                               gen_rtx_EXPR_LIST (VOIDmode,
2493                                                                  ((align_words >= GP_ARG_NUM_REG)
2494                                                                   ? NULL_RTX
2495                                                                   : (align_words
2496                                                                      + RS6000_ARG_SIZE (mode, type)
2497                                                                      > GP_ARG_NUM_REG
2498                                                                      /* If this is partially on the stack, then
2499                                                                         we only include the portion actually
2500                                                                         in registers here.  */
2501                                                                      ? gen_rtx_REG (SImode,
2502                                                                                     GP_ARG_MIN_REG + align_words)
2503                                                                      : gen_rtx_REG (mode,
2504                                                                                     GP_ARG_MIN_REG + align_words))),
2505                                                                  const0_rtx),
2506                                               gen_rtx_EXPR_LIST (VOIDmode,
2507                                                                  gen_rtx_REG (mode, cum->fregno),
2508                                                                  const0_rtx)));
2510       else if (align_words < GP_ARG_NUM_REG)
2511         return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2512       else
2513         return NULL_RTX;
2517 /* For an arg passed partly in registers and partly in memory,
2518 this is the number of registers used.
2519 For args passed entirely in registers or entirely in memory, zero. */
2522 function_arg_partial_nregs (cum, mode, type, named)
2523 CUMULATIVE_ARGS *cum;
2524 enum machine_mode mode;
2525 tree type;
2526 int named ATTRIBUTE_UNUSED;
2528 if (DEFAULT_ABI == ABI_V4)
2529 return 0;
2531 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2532 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2534 if (cum->nargs_prototype >= 0)
2535 return 0;
2538 if (cum->words < GP_ARG_NUM_REG
2539 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2541 int ret = GP_ARG_NUM_REG - cum->words;
2542 if (ret && TARGET_DEBUG_ARG)
2543 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2545 return ret;
2548 return 0;
2551 /* A C expression that indicates when an argument must be passed by
2552 reference. If nonzero for an argument, a copy of that argument is
2553 made in memory and a pointer to the argument is passed instead of
2554 the argument itself. The pointer is passed in whatever way is
2555 appropriate for passing a pointer to that type.
2557 Under V.4, structures and unions are passed by reference. */
2560 function_arg_pass_by_reference (cum, mode, type, named)
2561 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2562 enum machine_mode mode ATTRIBUTE_UNUSED;
2563 tree type;
2564 int named ATTRIBUTE_UNUSED;
2566 if (DEFAULT_ABI == ABI_V4
2567 && ((type && AGGREGATE_TYPE_P (type))
2568 || mode == TFmode))
2570 if (TARGET_DEBUG_ARG)
2571 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2573 return 1;
2576 return 0;
2579 /* Perform any needed actions needed for a function that is receiving a
2580    variable number of arguments.
2582    CUM is as above.
2584    MODE and TYPE are the mode and type of the current parameter.
2586    PRETEND_SIZE is a variable that should be set to the amount of stack
2587    that must be pushed by the prolog to pretend that our caller pushed
2590    Normally, this macro will push all remaining incoming registers on the
2591    stack and set PRETEND_SIZE to the length of the registers pushed.  */
2593 void
2594 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2595      CUMULATIVE_ARGS *cum;
2596      enum machine_mode mode;
2597      tree type;
2598      int *pretend_size;
2599      int no_rtl;
/* NOTE(review): brace-only lines were dropped by the extraction, so the
   function/block braces are not visible below.  */
2602   CUMULATIVE_ARGS next_cum;
2603   int reg_size = TARGET_32BIT ? 4 : 8;
2604   rtx save_area = NULL_RTX, mem;
2605   int first_reg_offset, set;
2606   tree fntype;
2607   int stdarg_p;
/* A function is stdarg (not old-style varargs) when its prototype has a
   trailing ellipsis, i.e. the arg list does not end in void_type_node.  */
2609   fntype = TREE_TYPE (current_function_decl);
2610   stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2611               && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2612                   != void_type_node));
2614   /* For varargs, we do not want to skip the dummy va_dcl argument.
2615      For stdargs, we do want to skip the last named argument.  */
2616   next_cum = *cum;
2617   if (stdarg_p)
2618     function_arg_advance (&next_cum, mode, type, 1);
2620   if (DEFAULT_ABI == ABI_V4)
2622       /* Indicate to allocate space on the stack for varargs save area.  */
2623       /* ??? Does this really have to be located at a magic spot on the
2624          stack, or can we allocate this with assign_stack_local instead.  */
2625       cfun->machine->sysv_varargs_p = 1;
2626       if (! no_rtl)
2627         save_area = plus_constant (virtual_stack_vars_rtx,
2628                                    - RS6000_VARARGS_SIZE);
2630       first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2632   else
2634       first_reg_offset = next_cum.words;
2635       save_area = virtual_incoming_args_rtx;
2636       cfun->machine->sysv_varargs_p = 0;
2638       if (MUST_PASS_IN_STACK (mode, type))
2639         first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
/* Spill the remaining incoming GP registers into the save area.  */
2642   set = get_varargs_alias_set ();
2643   if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2645       mem = gen_rtx_MEM (BLKmode,
2646                          plus_constant (save_area,
2647                                         first_reg_offset * reg_size)),
2648       set_mem_alias_set (mem, set);
2649       set_mem_align (mem, BITS_PER_WORD);
2651       move_block_from_reg
2652         (GP_ARG_MIN_REG + first_reg_offset, mem,
2653          GP_ARG_NUM_REG - first_reg_offset,
2654          (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2656       /* ??? Does ABI_V4 need this at all?  */
2657       *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2660   /* Save FP registers if needed.  */
2661   if (DEFAULT_ABI == ABI_V4
2662       && TARGET_HARD_FLOAT && ! no_rtl
2663       && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2665       int fregno = next_cum.fregno;
2666       rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2667       rtx lab = gen_label_rtx ();
2668       int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
/* Branch over the FP saves when CR1 says no FP args were passed in
   registers (the marker set at the call site -- see function_arg).  */
2670       emit_jump_insn (gen_rtx_SET (VOIDmode,
2671                                    pc_rtx,
2672                                    gen_rtx_IF_THEN_ELSE (VOIDmode,
2673                                                          gen_rtx_NE (VOIDmode, cr1,
2674                                                                      const0_rtx),
2675                                                          gen_rtx_LABEL_REF (VOIDmode, lab),
2676                                                          pc_rtx)));
2678       while (fregno <= FP_ARG_V4_MAX_REG)
2680           mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2681           set_mem_alias_set (mem, set);
2682           emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2683           fregno++;
2684           off += 8;
2687       emit_label (lab);
2691 /* Create the va_list data type. */
2693 tree
2694 rs6000_build_va_list ()
2696 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2698 /* For AIX, prefer 'char *' because that's what the system
2699 header files like. */
2700 if (DEFAULT_ABI != ABI_V4)
2701 return build_pointer_type (char_type_node);
2703 record = make_lang_type (RECORD_TYPE);
2704 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2706 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2707 unsigned_char_type_node);
2708 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2709 unsigned_char_type_node);
2710 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2711 ptr_type_node);
2712 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2713 ptr_type_node);
2715 DECL_FIELD_CONTEXT (f_gpr) = record;
2716 DECL_FIELD_CONTEXT (f_fpr) = record;
2717 DECL_FIELD_CONTEXT (f_ovf) = record;
2718 DECL_FIELD_CONTEXT (f_sav) = record;
2720 TREE_CHAIN (record) = type_decl;
2721 TYPE_NAME (record) = type_decl;
2722 TYPE_FIELDS (record) = f_gpr;
2723 TREE_CHAIN (f_gpr) = f_fpr;
2724 TREE_CHAIN (f_fpr) = f_ovf;
2725 TREE_CHAIN (f_ovf) = f_sav;
2727 layout_type (record);
2729 /* The correct type is an array type of one element. */
2730 return build_array_type (record, build_index_type (size_zero_node));
2733 /* Implement va_start.  */
2735 void
2736 rs6000_va_start (stdarg_p, valist, nextarg)
2737      int stdarg_p;
2738      tree valist;
2739      rtx nextarg;
/* NOTE(review): brace-only lines were dropped by the extraction, so the
   function/block braces are not visible below.  */
2741   HOST_WIDE_INT words, n_gpr, n_fpr;
2742   tree f_gpr, f_fpr, f_ovf, f_sav;
2743   tree gpr, fpr, ovf, sav, t;
2745   /* Only SVR4 needs something special.  */
2746   if (DEFAULT_ABI != ABI_V4)
2748       std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2749       return;
/* Walk the four fields of the va_list record built in
   rs6000_build_va_list, in declaration order.  */
2752   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2753   f_fpr = TREE_CHAIN (f_gpr);
2754   f_ovf = TREE_CHAIN (f_fpr);
2755   f_sav = TREE_CHAIN (f_ovf);
2757   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2758   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2759   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2760   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2761   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2763   /* Count number of gp and fp argument registers used.  */
2764   words = current_function_args_info.words;
2765   n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
2766   n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
2768   if (TARGET_DEBUG_ARG)
2770       fputs ("va_start: words = ", stderr);
2771       fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
2772       fputs (", n_gpr = ", stderr);
2773       fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
2774       fputs (", n_fpr = ", stderr);
2775       fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
2776       putc ('\n', stderr);
/* Store the register counters into the va_list fields.  */
2779   t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
2780   TREE_SIDE_EFFECTS (t) = 1;
2781   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2783   t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
2784   TREE_SIDE_EFFECTS (t) = 1;
2785   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2787   /* Find the overflow area.  */
2788   t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
2789   if (words != 0)
2790     t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
2791                build_int_2 (words * UNITS_PER_WORD, 0));
2792   t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
2793   TREE_SIDE_EFFECTS (t) = 1;
2794   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2796   /* Find the register save area.  */
2797   t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
2798   t = build (PLUS_EXPR, TREE_TYPE (sav), t,
2799              build_int_2 (-RS6000_VARARGS_SIZE, -1));
2800   t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
2801   TREE_SIDE_EFFECTS (t) = 1;
2802   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2805 /* Implement va_arg.  */
2808 rs6000_va_arg (valist, type)
2809      tree valist, type;
/* NOTE(review): brace-only lines (and the return-type line) were dropped
   by the extraction, so the function/block braces are not visible below.  */
2811   tree f_gpr, f_fpr, f_ovf, f_sav;
2812   tree gpr, fpr, ovf, sav, reg, t, u;
2813   int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
2814   rtx lab_false, lab_over, addr_rtx, r;
2816   /* For AIX, the rule is that structures are passed left-aligned in
2817      their stack slot.  However, GCC does not presently do this:
2818      structures which are the same size as integer types are passed
2819      right-aligned, as if they were in fact integers.  This only
2820      matters for structures of size 1 or 2, or 4 when TARGET_64BIT.  */
2821   if (DEFAULT_ABI != ABI_V4)
2823       HOST_WIDE_INT align, rounded_size;
2824       enum machine_mode mode;
2825       tree addr_tree;
2827       /* Compute the rounded size of the type.  */
2828       align = PARM_BOUNDARY / BITS_PER_UNIT;
2829       rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
2830                       * align);
2832       addr_tree = valist;
2834       mode = TYPE_MODE (type);
2835       if (mode != BLKmode)
2837           HOST_WIDE_INT adj;
2838           adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
2839           if (rounded_size > align)
2840             adj = rounded_size;
/* Right-adjust small args within their slot by offsetting the address.  */
2842           addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
2843                              build_int_2 (rounded_size - adj, 0));
2846       addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2847       addr_rtx = copy_to_reg (addr_rtx);
2849       /* Compute new value for AP.  */
2850       t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
2851                  build (PLUS_EXPR, TREE_TYPE (valist), valist,
2852                         build_int_2 (rounded_size, 0)));
2853       TREE_SIDE_EFFECTS (t) = 1;
2854       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2856       return addr_rtx;
/* From here on: the V.4 ABI, using the va_list record.  */
2859   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2860   f_fpr = TREE_CHAIN (f_gpr);
2861   f_ovf = TREE_CHAIN (f_fpr);
2862   f_sav = TREE_CHAIN (f_ovf);
2864   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2865   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2866   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2867   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2868   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2870   size = int_size_in_bytes (type);
2871   rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Classify the arg: REG is the counter field to use, N_REG how many
   registers it takes, SAV_OFS/SAV_SCALE locate it in the save area.  */
2873   if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
2875       /* Aggregates and long doubles are passed by reference.  */
2876       indirect_p = 1;
2877       reg = gpr;
2878       n_reg = 1;
2879       sav_ofs = 0;
2880       sav_scale = 4;
2881       size = rsize = UNITS_PER_WORD;
2883   else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
2885       /* FP args go in FP registers, if present.  */
2886       indirect_p = 0;
2887       reg = fpr;
2888       n_reg = 1;
2889       sav_ofs = 8*4;
2890       sav_scale = 8;
2892   else
2894       /* Otherwise into GP registers.  */
2895       indirect_p = 0;
2896       reg = gpr;
2897       n_reg = rsize;
2898       sav_ofs = 0;
2899       sav_scale = 4;
2902   /* Pull the value out of the saved registers ...  */
2904   lab_false = gen_label_rtx ();
2905   lab_over = gen_label_rtx ();
2906   addr_rtx = gen_reg_rtx (Pmode);
/* If the register counter is already past the last usable register,
   jump to the overflow-area path.  */
2908   emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
2909                            GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
2910                            lab_false);
2912   /* Long long is aligned in the registers.  */
2913   if (n_reg > 1)
2915       u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
2916                  build_int_2 (n_reg - 1, 0));
2917       u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
2918       u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
2919       TREE_SIDE_EFFECTS (u) = 1;
2920       expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
2923   if (sav_ofs)
2924     t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
2925   else
2926     t = sav;
/* Address = save area + sav_ofs + (reg counter, post-incremented by
   n_reg) * sav_scale.  */
2928   u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
2929   TREE_SIDE_EFFECTS (u) = 1;
2931   u = build1 (CONVERT_EXPR, integer_type_node, u);
2932   TREE_SIDE_EFFECTS (u) = 1;
2934   u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
2935   TREE_SIDE_EFFECTS (u) = 1;
2937   t = build (PLUS_EXPR, ptr_type_node, t, u);
2938   TREE_SIDE_EFFECTS (t) = 1;
2940   r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
2941   if (r != addr_rtx)
2942     emit_move_insn (addr_rtx, r);
2944   emit_jump_insn (gen_jump (lab_over));
2945   emit_barrier ();
2946   emit_label (lab_false);
2948   /* ... otherwise out of the overflow area.  */
2950   /* Make sure we don't find reg 7 for the next int arg.  */
2951   if (n_reg > 1)
2953       t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
2954       TREE_SIDE_EFFECTS (t) = 1;
2955       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2958   /* Care for on-stack alignment if needed.  */
2959   if (rsize <= 1)
2960     t = ovf;
2961   else
2963       t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
2964       t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
2966   t = save_expr (t);
2968   r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
2969   if (r != addr_rtx)
2970     emit_move_insn (addr_rtx, r);
/* Advance the overflow pointer past this argument.  */
2972   t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
2973   t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
2974   TREE_SIDE_EFFECTS (t) = 1;
2975   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2977   emit_label (lab_over);
/* By-reference args: what we computed is the address of a pointer to
   the value; load through it once more.  */
2979   if (indirect_p)
2981       r = gen_rtx_MEM (Pmode, addr_rtx);
2982       set_mem_alias_set (r, get_varargs_alias_set ());
2983       emit_move_insn (addr_rtx, r);
2986   return addr_rtx;
2989 /* Builtins.  */
/* Register builtin NAME (with type TYPE and code CODE) only when the
   MASK bits are enabled in target_flags.  */
2991 #define def_builtin(MASK, NAME, TYPE, CODE)                     \
2992   do {                                                          \
2993     if ((MASK) & target_flags)                                  \
2994       builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL);     \
2995   } while (0)
/* One table entry per builtin: enabling target-flag mask, the insn
   pattern that implements it, its user-visible name, and its code.  */
2997 struct builtin_description
2999   const unsigned int mask;
3000   const enum insn_code icode;
3001   const char *const name;
3002   const enum rs6000_builtins code;
3005 /* Simple binary operatiors: VECc = foo (VECa, VECb). */
3006 static const struct builtin_description bdesc_2arg[] =
3008 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3009 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3010 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3011 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3012 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3013 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3014 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3015 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3016 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3017 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3018 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3019 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3020 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3021 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3022 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3023 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3024 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3025 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3026 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3027 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3028 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3029 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3030 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3031 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3032 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3033 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3034 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3035 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3036 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3037 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3038 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3039 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3040 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3041 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3042 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3043 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3044 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3045 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3046 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3047 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3048 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3049 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3050 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3051 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3052 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3053 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3054 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3055 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3056 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3057 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3058 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3059 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3060 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3061 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3062 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3063 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3064 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3065 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3066 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3067 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3068 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3069 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3070 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3071 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3072 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3073 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3074 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3075 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3076 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3077 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3078 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3079 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3080 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3081 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3082 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3083 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3084 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3085 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3086 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3087 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3088 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3089 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3090 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3091 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3092 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3093 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3094 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3095 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3096 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3097 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3098 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3099 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3100 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3101 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3102 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3103 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3104 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3105 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3106 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3107 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3108 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3109 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3110 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3111 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3112 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3113 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3116 static rtx
3117 altivec_expand_binop_builtin (icode, arglist, target)
3118 enum insn_code icode;
3119 tree arglist;
3120 rtx target;
3122 rtx pat;
3123 tree arg0 = TREE_VALUE (arglist);
3124 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3125 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3126 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3127 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3128 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3129 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3131 if (! target
3132 || GET_MODE (target) != tmode
3133 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3134 target = gen_reg_rtx (tmode);
3136 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3137 op0 = copy_to_mode_reg (mode0, op0);
3138 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3139 op1 = copy_to_mode_reg (mode1, op1);
3141 pat = GEN_FCN (icode) (target, op0, op1);
3142 if (! pat)
3143 return 0;
3144 emit_insn (pat);
3146 return target;
3149 static rtx
3150 altivec_expand_builtin (exp, target)
3151 tree exp;
3152 rtx target;
3154 struct builtin_description *d;
3155 size_t i;
3156 enum insn_code icode;
3157 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3158 tree arglist = TREE_OPERAND (exp, 1);
3159 tree arg0, arg1;
3160 rtx op0, op1, pat;
3161 enum machine_mode tmode, mode0, mode1;
3162 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3164 switch (fcode)
3166 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3167 icode = CODE_FOR_altivec_lvx_16qi;
3168 arg0 = TREE_VALUE (arglist);
3169 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3170 tmode = insn_data[icode].operand[0].mode;
3171 mode0 = insn_data[icode].operand[1].mode;
3173 if (! target
3174 || GET_MODE (target) != tmode
3175 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3176 target = gen_reg_rtx (tmode);
3178 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3179 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3181 pat = GEN_FCN (icode) (target, op0);
3182 if (! pat)
3183 return 0;
3184 emit_insn (pat);
3185 return target;
3186 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3187 icode = CODE_FOR_altivec_lvx_8hi;
3188 arg0 = TREE_VALUE (arglist);
3189 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3190 tmode = insn_data[icode].operand[0].mode;
3191 mode0 = insn_data[icode].operand[1].mode;
3193 if (! target
3194 || GET_MODE (target) != tmode
3195 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3196 target = gen_reg_rtx (tmode);
3198 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3199 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3201 pat = GEN_FCN (icode) (target, op0);
3202 if (! pat)
3203 return 0;
3204 emit_insn (pat);
3205 return target;
3206 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3207 icode = CODE_FOR_altivec_lvx_4si;
3208 arg0 = TREE_VALUE (arglist);
3209 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3210 tmode = insn_data[icode].operand[0].mode;
3211 mode0 = insn_data[icode].operand[1].mode;
3213 if (! target
3214 || GET_MODE (target) != tmode
3215 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3216 target = gen_reg_rtx (tmode);
3218 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3219 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3221 pat = GEN_FCN (icode) (target, op0);
3222 if (! pat)
3223 return 0;
3224 emit_insn (pat);
3225 return target;
3226 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3227 icode = CODE_FOR_altivec_lvx_4sf;
3228 arg0 = TREE_VALUE (arglist);
3229 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3230 tmode = insn_data[icode].operand[0].mode;
3231 mode0 = insn_data[icode].operand[1].mode;
3233 if (! target
3234 || GET_MODE (target) != tmode
3235 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3236 target = gen_reg_rtx (tmode);
3238 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3239 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3241 pat = GEN_FCN (icode) (target, op0);
3242 if (! pat)
3243 return 0;
3244 emit_insn (pat);
3245 return target;
3247 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3248 icode = CODE_FOR_altivec_stvx_16qi;
3249 arg0 = TREE_VALUE (arglist);
3250 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3251 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3252 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3253 mode0 = insn_data[icode].operand[0].mode;
3254 mode1 = insn_data[icode].operand[1].mode;
3256 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3257 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3258 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3259 op1 = copy_to_mode_reg (mode1, op1);
3261 pat = GEN_FCN (icode) (op0, op1);
3262 if (! pat)
3263 return 0;
3264 emit_insn (pat);
3265 return NULL_RTX;
3266 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3267 icode = CODE_FOR_altivec_stvx_8hi;
3268 arg0 = TREE_VALUE (arglist);
3269 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3270 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3271 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3272 mode0 = insn_data[icode].operand[0].mode;
3273 mode1 = insn_data[icode].operand[1].mode;
3275 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3276 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3277 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3278 op1 = copy_to_mode_reg (mode1, op1);
3280 pat = GEN_FCN (icode) (op0, op1);
3281 if (! pat)
3282 return 0;
3283 emit_insn (pat);
3284 return NULL_RTX;
3285 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3286 icode = CODE_FOR_altivec_stvx_4si;
3287 arg0 = TREE_VALUE (arglist);
3288 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3289 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3290 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3291 mode0 = insn_data[icode].operand[0].mode;
3292 mode1 = insn_data[icode].operand[1].mode;
3294 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3295 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3296 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3297 op1 = copy_to_mode_reg (mode1, op1);
3299 pat = GEN_FCN (icode) (op0, op1);
3300 if (! pat)
3301 return 0;
3302 emit_insn (pat);
3303 return NULL_RTX;
3304 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3305 icode = CODE_FOR_altivec_stvx_4sf;
3306 arg0 = TREE_VALUE (arglist);
3307 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3308 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3309 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3310 mode0 = insn_data[icode].operand[0].mode;
3311 mode1 = insn_data[icode].operand[1].mode;
3313 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3314 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3315 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3316 op1 = copy_to_mode_reg (mode1, op1);
3318 pat = GEN_FCN (icode) (op0, op1);
3319 if (! pat)
3320 return 0;
3321 emit_insn (pat);
3322 return NULL_RTX;
3325 /* Handle simple binary operations. */
3326 d = (struct builtin_description *) bdesc_2arg;
3327 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3328 if (d->code == fcode)
3329 return altivec_expand_binop_builtin (d->icode, arglist, target);
3331 abort ();
3332 return NULL_RTX;
3335 /* Expand an expression EXP that calls a built-in function,
3336 with result going to TARGET if that's convenient
3337 (and in mode MODE if that's convenient).
3338 SUBTARGET may be used as the target for computing one of EXP's operands.
3339 IGNORE is nonzero if the value is to be ignored. */
3341 static rtx
3342 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
3343 tree exp;
3344 rtx target;
3345 rtx subtarget ATTRIBUTE_UNUSED;
3346 enum machine_mode mode ATTRIBUTE_UNUSED;
3347 int ignore ATTRIBUTE_UNUSED;
3349 if (TARGET_ALTIVEC)
3350 return altivec_expand_builtin (exp, target);
3352 abort ();
3355 static void
3356 rs6000_init_builtins ()
3358 if (TARGET_ALTIVEC)
3359 altivec_init_builtins ();
3362 static void
3363 altivec_init_builtins (void)
3365 struct builtin_description * d;
3366 size_t i;
3368 tree endlink = void_list_node;
3370 tree pint_type_node = build_pointer_type (integer_type_node);
3371 tree pshort_type_node = build_pointer_type (short_integer_type_node);
3372 tree pchar_type_node = build_pointer_type (char_type_node);
3373 tree pfloat_type_node = build_pointer_type (float_type_node);
3375 /* V4SI foo (int *). */
3376 tree v4si_ftype_pint
3377 = build_function_type (V4SI_type_node,
3378 tree_cons (NULL_TREE, pint_type_node, endlink));
3379 /* V8HI foo (short *). */
3380 tree v8hi_ftype_pshort
3381 = build_function_type (V8HI_type_node,
3382 tree_cons (NULL_TREE, pshort_type_node, endlink));
3383 /* V16QI foo (char *). */
3384 tree v16qi_ftype_pchar
3385 = build_function_type (V16QI_type_node,
3386 tree_cons (NULL_TREE, pchar_type_node, endlink));
3387 /* V4SF foo (float *). */
3388 tree v4sf_ftype_pfloat
3389 = build_function_type (V4SF_type_node,
3390 tree_cons (NULL_TREE, pfloat_type_node, endlink));
3392 /* void foo (int *, V4SI). */
3393 tree void_ftype_pint_v4si
3394 = build_function_type (void_type_node,
3395 tree_cons (NULL_TREE, pint_type_node,
3396 tree_cons (NULL_TREE, V4SI_type_node,
3397 endlink)));
3398 /* void foo (short *, V8HI). */
3399 tree void_ftype_pshort_v8hi
3400 = build_function_type (void_type_node,
3401 tree_cons (NULL_TREE, pshort_type_node,
3402 tree_cons (NULL_TREE, V8HI_type_node,
3403 endlink)));
3404 /* void foo (char *, V16QI). */
3405 tree void_ftype_pchar_v16qi
3406 = build_function_type (void_type_node,
3407 tree_cons (NULL_TREE, pchar_type_node,
3408 tree_cons (NULL_TREE, V16QI_type_node,
3409 endlink)));
3410 /* void foo (float *, V4SF). */
3411 tree void_ftype_pfloat_v4sf
3412 = build_function_type (void_type_node,
3413 tree_cons (NULL_TREE, pfloat_type_node,
3414 tree_cons (NULL_TREE, V4SF_type_node,
3415 endlink)));
3417 tree v4si_ftype_v4si_v4si
3418 = build_function_type (V4SI_type_node,
3419 tree_cons (NULL_TREE, V4SI_type_node,
3420 tree_cons (NULL_TREE, V4SI_type_node,
3421 endlink)));
3423 tree v4sf_ftype_v4sf_v4sf
3424 = build_function_type (V4SF_type_node,
3425 tree_cons (NULL_TREE, V4SF_type_node,
3426 tree_cons (NULL_TREE, V4SF_type_node,
3427 endlink)));
3429 tree v8hi_ftype_v8hi_v8hi
3430 = build_function_type (V8HI_type_node,
3431 tree_cons (NULL_TREE, V8HI_type_node,
3432 tree_cons (NULL_TREE, V8HI_type_node,
3433 endlink)));
3435 tree v16qi_ftype_v16qi_v16qi
3436 = build_function_type (V16QI_type_node,
3437 tree_cons (NULL_TREE, V16QI_type_node,
3438 tree_cons (NULL_TREE, V16QI_type_node,
3439 endlink)));
3441 tree v4si_ftype_v4sf_v4sf
3442 = build_function_type (V4SI_type_node,
3443 tree_cons (NULL_TREE, V4SF_type_node,
3444 tree_cons (NULL_TREE, V4SF_type_node,
3445 endlink)));
3447 tree v8hi_ftype_v16qi_v16qi
3448 = build_function_type (V8HI_type_node,
3449 tree_cons (NULL_TREE, V16QI_type_node,
3450 tree_cons (NULL_TREE, V16QI_type_node,
3451 endlink)));
3453 tree v4si_ftype_v8hi_v8hi
3454 = build_function_type (V4SI_type_node,
3455 tree_cons (NULL_TREE, V8HI_type_node,
3456 tree_cons (NULL_TREE, V8HI_type_node,
3457 endlink)));
3459 tree v8hi_ftype_v4si_v4si
3460 = build_function_type (V8HI_type_node,
3461 tree_cons (NULL_TREE, V4SI_type_node,
3462 tree_cons (NULL_TREE, V4SI_type_node,
3463 endlink)));
3465 tree v16qi_ftype_v8hi_v8hi
3466 = build_function_type (V16QI_type_node,
3467 tree_cons (NULL_TREE, V8HI_type_node,
3468 tree_cons (NULL_TREE, V8HI_type_node,
3469 endlink)));
3471 tree v4si_ftype_v16qi_v4si
3472 = build_function_type (V4SI_type_node,
3473 tree_cons (NULL_TREE, V16QI_type_node,
3474 tree_cons (NULL_TREE, V4SI_type_node,
3475 endlink)));
3477 tree v4si_ftype_v8hi_v4si
3478 = build_function_type (V4SI_type_node,
3479 tree_cons (NULL_TREE, V8HI_type_node,
3480 tree_cons (NULL_TREE, V4SI_type_node,
3481 endlink)));
3483 tree int_ftype_v4si_v4si
3484 = build_function_type (integer_type_node,
3485 tree_cons (NULL_TREE, V4SI_type_node,
3486 tree_cons (NULL_TREE, V4SI_type_node,
3487 endlink)));
3489 tree int_ftype_v4sf_v4sf
3490 = build_function_type (integer_type_node,
3491 tree_cons (NULL_TREE, V4SF_type_node,
3492 tree_cons (NULL_TREE, V4SF_type_node,
3493 endlink)));
3495 tree int_ftype_v16qi_v16qi
3496 = build_function_type (integer_type_node,
3497 tree_cons (NULL_TREE, V16QI_type_node,
3498 tree_cons (NULL_TREE, V16QI_type_node,
3499 endlink)));
3501 tree int_ftype_v8hi_v8hi
3502 = build_function_type (integer_type_node,
3503 tree_cons (NULL_TREE, V8HI_type_node,
3504 tree_cons (NULL_TREE, V8HI_type_node,
3505 endlink)));
3507 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
3508 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
3509 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
3510 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
3511 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
3512 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
3513 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
3514 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
3516 /* Add the simple binary operators. */
3517 d = (struct builtin_description *) bdesc_2arg;
3518 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3520 enum machine_mode mode0, mode1, mode2;
3521 tree type;
3523 if (d->name == 0)
3524 continue;
3526 mode0 = insn_data[d->icode].operand[0].mode;
3527 mode1 = insn_data[d->icode].operand[1].mode;
3528 mode2 = insn_data[d->icode].operand[2].mode;
3530 /* When all three operands are of the same mode. */
3531 if (mode0 == mode1 && mode1 == mode2)
3533 switch (mode0)
3535 case V4SFmode:
3536 type = v4sf_ftype_v4sf_v4sf;
3537 break;
3538 case V4SImode:
3539 type = v4si_ftype_v4si_v4si;
3540 break;
3541 case V16QImode:
3542 type = v16qi_ftype_v16qi_v16qi;
3543 break;
3544 case V8HImode:
3545 type = v8hi_ftype_v8hi_v8hi;
3546 break;
3547 default:
3548 abort ();
3552 /* A few other combos we really don't want to do manually. */
3554 /* vint, vfloat, vfloat. */
3555 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
3556 type = v4si_ftype_v4sf_v4sf;
3558 /* vshort, vchar, vchar. */
3559 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
3560 type = v8hi_ftype_v16qi_v16qi;
3562 /* vint, vshort, vshort. */
3563 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
3564 type = v4si_ftype_v8hi_v8hi;
3566 /* vshort, vint, vint. */
3567 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
3568 type = v8hi_ftype_v4si_v4si;
3570 /* vchar, vshort, vshort. */
3571 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
3572 type = v16qi_ftype_v8hi_v8hi;
3574 /* vint, vchar, vint. */
3575 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
3576 type = v4si_ftype_v16qi_v4si;
3578 /* vint, vshort, vint. */
3579 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
3580 type = v4si_ftype_v8hi_v4si;
3582 /* fixme: aldyh */
3583 /* int, x, x. */
3584 else if (mode0 == SImode)
3586 switch (mode1)
3588 case V4SImode:
3589 type = int_ftype_v4si_v4si;
3590 break;
3591 case V4SFmode:
3592 type = int_ftype_v4sf_v4sf;
3593 break;
3594 case V16QImode:
3595 type = int_ftype_v16qi_v16qi;
3596 break;
3597 case V8HImode:
3598 type = int_ftype_v8hi_v8hi;
3599 break;
3600 default:
3601 abort ();
3605 else
3606 abort ();
3608 def_builtin (d->mask, d->name, type, d->code);
3613 /* Generate a memory reference for expand_block_move, copying volatile,
3614 and other bits from an original memory reference. */
3616 static rtx
3617 expand_block_move_mem (mode, addr, orig_mem)
3618 enum machine_mode mode;
3619 rtx addr;
3620 rtx orig_mem;
3622 rtx mem = gen_rtx_MEM (mode, addr);
3624 MEM_COPY_ATTRIBUTES (mem, orig_mem);
3625 return mem;
3628 /* Expand a block move operation, and return 1 if successful. Return 0
3629 if we should let the compiler generate normal code.
3631 operands[0] is the destination
3632 operands[1] is the source
3633 operands[2] is the length
3634 operands[3] is the alignment */
3636 #define MAX_MOVE_REG 4
3639 expand_block_move (operands)
3640 rtx operands[];
3642 rtx orig_dest = operands[0];
3643 rtx orig_src = operands[1];
3644 rtx bytes_rtx = operands[2];
3645 rtx align_rtx = operands[3];
3646 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
3647 int align;
3648 int bytes;
3649 int offset;
3650 int num_reg;
3651 int i;
3652 rtx src_reg;
3653 rtx dest_reg;
3654 rtx src_addr;
3655 rtx dest_addr;
3656 rtx tmp_reg;
3657 rtx stores[MAX_MOVE_REG];
3658 int move_bytes;
3660 /* If this is not a fixed size move, just call memcpy */
3661 if (! constp)
3662 return 0;
3664 /* If this is not a fixed size alignment, abort */
3665 if (GET_CODE (align_rtx) != CONST_INT)
3666 abort ();
3667 align = INTVAL (align_rtx);
3669 /* Anything to move? */
3670 bytes = INTVAL (bytes_rtx);
3671 if (bytes <= 0)
3672 return 1;
3674 /* Don't support real large moves. If string instructions are not used,
3675 then don't generate more than 8 loads. */
3676 if (TARGET_STRING)
3678 if (bytes > 8*4)
3679 return 0;
3681 else if (! STRICT_ALIGNMENT)
3683 if (TARGET_POWERPC64 && align >= 4)
3685 if (bytes > 8*8)
3686 return 0;
3688 else
3689 if (bytes > 8*4)
3690 return 0;
3692 else if (bytes > 8*align)
3693 return 0;
3695 /* Move the address into scratch registers. */
3696 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
3697 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
3699 if (TARGET_STRING) /* string instructions are available */
3701 for ( ; bytes > 0; bytes -= move_bytes)
3703 if (bytes > 24 /* move up to 32 bytes at a time */
3704 && ! fixed_regs[5]
3705 && ! fixed_regs[6]
3706 && ! fixed_regs[7]
3707 && ! fixed_regs[8]
3708 && ! fixed_regs[9]
3709 && ! fixed_regs[10]
3710 && ! fixed_regs[11]
3711 && ! fixed_regs[12])
3713 move_bytes = (bytes > 32) ? 32 : bytes;
3714 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
3715 dest_reg,
3716 orig_dest),
3717 expand_block_move_mem (BLKmode,
3718 src_reg,
3719 orig_src),
3720 GEN_INT ((move_bytes == 32)
3721 ? 0 : move_bytes),
3722 align_rtx));
3724 else if (bytes > 16 /* move up to 24 bytes at a time */
3725 && ! fixed_regs[5]
3726 && ! fixed_regs[6]
3727 && ! fixed_regs[7]
3728 && ! fixed_regs[8]
3729 && ! fixed_regs[9]
3730 && ! fixed_regs[10])
3732 move_bytes = (bytes > 24) ? 24 : bytes;
3733 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
3734 dest_reg,
3735 orig_dest),
3736 expand_block_move_mem (BLKmode,
3737 src_reg,
3738 orig_src),
3739 GEN_INT (move_bytes),
3740 align_rtx));
3742 else if (bytes > 8 /* move up to 16 bytes at a time */
3743 && ! fixed_regs[5]
3744 && ! fixed_regs[6]
3745 && ! fixed_regs[7]
3746 && ! fixed_regs[8])
3748 move_bytes = (bytes > 16) ? 16 : bytes;
3749 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
3750 dest_reg,
3751 orig_dest),
3752 expand_block_move_mem (BLKmode,
3753 src_reg,
3754 orig_src),
3755 GEN_INT (move_bytes),
3756 align_rtx));
3758 else if (bytes >= 8 && TARGET_POWERPC64
3759 /* 64-bit loads and stores require word-aligned
3760 displacements. */
3761 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
3763 move_bytes = 8;
3764 tmp_reg = gen_reg_rtx (DImode);
3765 emit_move_insn (tmp_reg,
3766 expand_block_move_mem (DImode,
3767 src_reg, orig_src));
3768 emit_move_insn (expand_block_move_mem (DImode,
3769 dest_reg, orig_dest),
3770 tmp_reg);
3772 else if (bytes > 4)
3773 { /* move up to 8 bytes at a time */
3774 move_bytes = (bytes > 8) ? 8 : bytes;
3775 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
3776 dest_reg,
3777 orig_dest),
3778 expand_block_move_mem (BLKmode,
3779 src_reg,
3780 orig_src),
3781 GEN_INT (move_bytes),
3782 align_rtx));
3784 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
3785 { /* move 4 bytes */
3786 move_bytes = 4;
3787 tmp_reg = gen_reg_rtx (SImode);
3788 emit_move_insn (tmp_reg,
3789 expand_block_move_mem (SImode,
3790 src_reg, orig_src));
3791 emit_move_insn (expand_block_move_mem (SImode,
3792 dest_reg, orig_dest),
3793 tmp_reg);
3795 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
3796 { /* move 2 bytes */
3797 move_bytes = 2;
3798 tmp_reg = gen_reg_rtx (HImode);
3799 emit_move_insn (tmp_reg,
3800 expand_block_move_mem (HImode,
3801 src_reg, orig_src));
3802 emit_move_insn (expand_block_move_mem (HImode,
3803 dest_reg, orig_dest),
3804 tmp_reg);
3806 else if (bytes == 1) /* move 1 byte */
3808 move_bytes = 1;
3809 tmp_reg = gen_reg_rtx (QImode);
3810 emit_move_insn (tmp_reg,
3811 expand_block_move_mem (QImode,
3812 src_reg, orig_src));
3813 emit_move_insn (expand_block_move_mem (QImode,
3814 dest_reg, orig_dest),
3815 tmp_reg);
3817 else
3818 { /* move up to 4 bytes at a time */
3819 move_bytes = (bytes > 4) ? 4 : bytes;
3820 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
3821 dest_reg,
3822 orig_dest),
3823 expand_block_move_mem (BLKmode,
3824 src_reg,
3825 orig_src),
3826 GEN_INT (move_bytes),
3827 align_rtx));
3830 if (bytes > move_bytes)
3832 if (! TARGET_POWERPC64)
3834 emit_insn (gen_addsi3 (src_reg, src_reg,
3835 GEN_INT (move_bytes)));
3836 emit_insn (gen_addsi3 (dest_reg, dest_reg,
3837 GEN_INT (move_bytes)));
3839 else
3841 emit_insn (gen_adddi3 (src_reg, src_reg,
3842 GEN_INT (move_bytes)));
3843 emit_insn (gen_adddi3 (dest_reg, dest_reg,
3844 GEN_INT (move_bytes)));
3850 else /* string instructions not available */
3852 num_reg = offset = 0;
3853 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
3855 /* Calculate the correct offset for src/dest */
3856 if (offset == 0)
3858 src_addr = src_reg;
3859 dest_addr = dest_reg;
3861 else
3863 src_addr = plus_constant (src_reg, offset);
3864 dest_addr = plus_constant (dest_reg, offset);
3867 /* Generate the appropriate load and store, saving the stores
3868 for later. */
3869 if (bytes >= 8 && TARGET_POWERPC64
3870 /* 64-bit loads and stores require word-aligned
3871 displacements. */
3872 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
3874 move_bytes = 8;
3875 tmp_reg = gen_reg_rtx (DImode);
3876 emit_insn (gen_movdi (tmp_reg,
3877 expand_block_move_mem (DImode,
3878 src_addr,
3879 orig_src)));
3880 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
3881 dest_addr,
3882 orig_dest),
3883 tmp_reg);
3885 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
3887 move_bytes = 4;
3888 tmp_reg = gen_reg_rtx (SImode);
3889 emit_insn (gen_movsi (tmp_reg,
3890 expand_block_move_mem (SImode,
3891 src_addr,
3892 orig_src)));
3893 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
3894 dest_addr,
3895 orig_dest),
3896 tmp_reg);
3898 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
3900 move_bytes = 2;
3901 tmp_reg = gen_reg_rtx (HImode);
3902 emit_insn (gen_movhi (tmp_reg,
3903 expand_block_move_mem (HImode,
3904 src_addr,
3905 orig_src)));
3906 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
3907 dest_addr,
3908 orig_dest),
3909 tmp_reg);
3911 else
3913 move_bytes = 1;
3914 tmp_reg = gen_reg_rtx (QImode);
3915 emit_insn (gen_movqi (tmp_reg,
3916 expand_block_move_mem (QImode,
3917 src_addr,
3918 orig_src)));
3919 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
3920 dest_addr,
3921 orig_dest),
3922 tmp_reg);
3925 if (num_reg >= MAX_MOVE_REG)
3927 for (i = 0; i < num_reg; i++)
3928 emit_insn (stores[i]);
3929 num_reg = 0;
3933 for (i = 0; i < num_reg; i++)
3934 emit_insn (stores[i]);
3937 return 1;
3941 /* Return 1 if OP is a load multiple operation. It is known to be a
3942 PARALLEL and the first section will be tested. */
3945 load_multiple_operation (op, mode)
3946 rtx op;
3947 enum machine_mode mode ATTRIBUTE_UNUSED;
3949 int count = XVECLEN (op, 0);
3950 unsigned int dest_regno;
3951 rtx src_addr;
3952 int i;
3954 /* Perform a quick check so we don't blow up below. */
3955 if (count <= 1
3956 || GET_CODE (XVECEXP (op, 0, 0)) != SET
3957 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
3958 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
3959 return 0;
3961 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
3962 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
3964 for (i = 1; i < count; i++)
3966 rtx elt = XVECEXP (op, 0, i);
3968 if (GET_CODE (elt) != SET
3969 || GET_CODE (SET_DEST (elt)) != REG
3970 || GET_MODE (SET_DEST (elt)) != SImode
3971 || REGNO (SET_DEST (elt)) != dest_regno + i
3972 || GET_CODE (SET_SRC (elt)) != MEM
3973 || GET_MODE (SET_SRC (elt)) != SImode
3974 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3975 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3976 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3977 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
3978 return 0;
3981 return 1;
3984 /* Similar, but tests for store multiple. Here, the second vector element
3985 is a CLOBBER. It will be tested later. */
3988 store_multiple_operation (op, mode)
3989 rtx op;
3990 enum machine_mode mode ATTRIBUTE_UNUSED;
3992 int count = XVECLEN (op, 0) - 1;
3993 unsigned int src_regno;
3994 rtx dest_addr;
3995 int i;
3997 /* Perform a quick check so we don't blow up below. */
3998 if (count <= 1
3999 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4000 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4001 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4002 return 0;
4004 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4005 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4007 for (i = 1; i < count; i++)
4009 rtx elt = XVECEXP (op, 0, i + 1);
4011 if (GET_CODE (elt) != SET
4012 || GET_CODE (SET_SRC (elt)) != REG
4013 || GET_MODE (SET_SRC (elt)) != SImode
4014 || REGNO (SET_SRC (elt)) != src_regno + i
4015 || GET_CODE (SET_DEST (elt)) != MEM
4016 || GET_MODE (SET_DEST (elt)) != SImode
4017 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4018 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4019 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4020 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
4021 return 0;
4024 return 1;
4027 /* Return 1 for a parallel vrsave operation. */
4030 vrsave_operation (op, mode)
4031 rtx op;
4032 enum machine_mode mode ATTRIBUTE_UNUSED;
4034 int count = XVECLEN (op, 0);
4035 unsigned int dest_regno, src_regno;
4036 int i;
4038 if (count <= 1
4039 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4040 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4041 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4042 return 0;
4044 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4045 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4047 if (dest_regno != VRSAVE_REGNO
4048 && src_regno != VRSAVE_REGNO)
4049 return 0;
4051 for (i = 1; i < count; i++)
4053 rtx elt = XVECEXP (op, 0, i);
4055 if (GET_CODE (elt) != CLOBBER)
4056 return 0;
4059 return 1;
4062 /* Return 1 for an PARALLEL suitable for mtcrf. */
4065 mtcrf_operation (op, mode)
4066 rtx op;
4067 enum machine_mode mode ATTRIBUTE_UNUSED;
4069 int count = XVECLEN (op, 0);
4070 int i;
4071 rtx src_reg;
4073 /* Perform a quick check so we don't blow up below. */
4074 if (count < 1
4075 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4076 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
4077 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
4078 return 0;
4079 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
4081 if (GET_CODE (src_reg) != REG
4082 || GET_MODE (src_reg) != SImode
4083 || ! INT_REGNO_P (REGNO (src_reg)))
4084 return 0;
4086 for (i = 0; i < count; i++)
4088 rtx exp = XVECEXP (op, 0, i);
4089 rtx unspec;
4090 int maskval;
4092 if (GET_CODE (exp) != SET
4093 || GET_CODE (SET_DEST (exp)) != REG
4094 || GET_MODE (SET_DEST (exp)) != CCmode
4095 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
4096 return 0;
4097 unspec = SET_SRC (exp);
4098 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
4100 if (GET_CODE (unspec) != UNSPEC
4101 || XINT (unspec, 1) != 20
4102 || XVECLEN (unspec, 0) != 2
4103 || XVECEXP (unspec, 0, 0) != src_reg
4104 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
4105 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
4106 return 0;
4108 return 1;
4111 /* Return 1 for an PARALLEL suitable for lmw. */
4114 lmw_operation (op, mode)
4115 rtx op;
4116 enum machine_mode mode ATTRIBUTE_UNUSED;
4118 int count = XVECLEN (op, 0);
4119 unsigned int dest_regno;
4120 rtx src_addr;
4121 unsigned int base_regno;
4122 HOST_WIDE_INT offset;
4123 int i;
4125 /* Perform a quick check so we don't blow up below. */
4126 if (count <= 1
4127 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4128 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4129 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4130 return 0;
4132 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4133 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4135 if (dest_regno > 31
4136 || count != 32 - (int) dest_regno)
4137 return 0;
4139 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
4141 offset = 0;
4142 base_regno = REGNO (src_addr);
4143 if (base_regno == 0)
4144 return 0;
4146 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
4148 offset = INTVAL (XEXP (src_addr, 1));
4149 base_regno = REGNO (XEXP (src_addr, 0));
4151 else
4152 return 0;
4154 for (i = 0; i < count; i++)
4156 rtx elt = XVECEXP (op, 0, i);
4157 rtx newaddr;
4158 rtx addr_reg;
4159 HOST_WIDE_INT newoffset;
4161 if (GET_CODE (elt) != SET
4162 || GET_CODE (SET_DEST (elt)) != REG
4163 || GET_MODE (SET_DEST (elt)) != SImode
4164 || REGNO (SET_DEST (elt)) != dest_regno + i
4165 || GET_CODE (SET_SRC (elt)) != MEM
4166 || GET_MODE (SET_SRC (elt)) != SImode)
4167 return 0;
4168 newaddr = XEXP (SET_SRC (elt), 0);
4169 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4171 newoffset = 0;
4172 addr_reg = newaddr;
4174 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4176 addr_reg = XEXP (newaddr, 0);
4177 newoffset = INTVAL (XEXP (newaddr, 1));
4179 else
4180 return 0;
4181 if (REGNO (addr_reg) != base_regno
4182 || newoffset != offset + 4 * i)
4183 return 0;
4186 return 1;
4189 /* Return 1 for an PARALLEL suitable for stmw. */
4192 stmw_operation (op, mode)
4193 rtx op;
4194 enum machine_mode mode ATTRIBUTE_UNUSED;
4196 int count = XVECLEN (op, 0);
4197 unsigned int src_regno;
4198 rtx dest_addr;
4199 unsigned int base_regno;
4200 HOST_WIDE_INT offset;
4201 int i;
4203 /* Perform a quick check so we don't blow up below. */
4204 if (count <= 1
4205 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4206 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4207 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4208 return 0;
4210 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4211 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4213 if (src_regno > 31
4214 || count != 32 - (int) src_regno)
4215 return 0;
4217 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
4219 offset = 0;
4220 base_regno = REGNO (dest_addr);
4221 if (base_regno == 0)
4222 return 0;
4224 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
4226 offset = INTVAL (XEXP (dest_addr, 1));
4227 base_regno = REGNO (XEXP (dest_addr, 0));
4229 else
4230 return 0;
4232 for (i = 0; i < count; i++)
4234 rtx elt = XVECEXP (op, 0, i);
4235 rtx newaddr;
4236 rtx addr_reg;
4237 HOST_WIDE_INT newoffset;
4239 if (GET_CODE (elt) != SET
4240 || GET_CODE (SET_SRC (elt)) != REG
4241 || GET_MODE (SET_SRC (elt)) != SImode
4242 || REGNO (SET_SRC (elt)) != src_regno + i
4243 || GET_CODE (SET_DEST (elt)) != MEM
4244 || GET_MODE (SET_DEST (elt)) != SImode)
4245 return 0;
4246 newaddr = XEXP (SET_DEST (elt), 0);
4247 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4249 newoffset = 0;
4250 addr_reg = newaddr;
4252 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4254 addr_reg = XEXP (newaddr, 0);
4255 newoffset = INTVAL (XEXP (newaddr, 1));
4257 else
4258 return 0;
4259 if (REGNO (addr_reg) != base_regno
4260 || newoffset != offset + 4 * i)
4261 return 0;
4264 return 1;
4267 /* A validation routine: say whether CODE, a condition code, and MODE
4268 match. The other alternatives either don't make sense or should
4269 never be generated. */
4271 static void
4272 validate_condition_mode (code, mode)
4273 enum rtx_code code;
4274 enum machine_mode mode;
4276 if (GET_RTX_CLASS (code) != '<'
4277 || GET_MODE_CLASS (mode) != MODE_CC)
4278 abort ();
4280 /* These don't make sense. */
4281 if ((code == GT || code == LT || code == GE || code == LE)
4282 && mode == CCUNSmode)
4283 abort ();
4285 if ((code == GTU || code == LTU || code == GEU || code == LEU)
4286 && mode != CCUNSmode)
4287 abort ();
4289 if (mode != CCFPmode
4290 && (code == ORDERED || code == UNORDERED
4291 || code == UNEQ || code == LTGT
4292 || code == UNGT || code == UNLT
4293 || code == UNGE || code == UNLE))
4294 abort ();
4296 /* These should never be generated except for
4297 flag_unsafe_math_optimizations. */
4298 if (mode == CCFPmode
4299 && ! flag_unsafe_math_optimizations
4300 && (code == LE || code == GE
4301 || code == UNEQ || code == LTGT
4302 || code == UNGT || code == UNLT))
4303 abort ();
4305 /* These are invalid; the information is not there. */
4306 if (mode == CCEQmode
4307 && code != EQ && code != NE)
4308 abort ();
4311 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
4312 We only check the opcode against the mode of the CC value here. */
4315 branch_comparison_operator (op, mode)
4316 rtx op;
4317 enum machine_mode mode ATTRIBUTE_UNUSED;
4319 enum rtx_code code = GET_CODE (op);
4320 enum machine_mode cc_mode;
4322 if (GET_RTX_CLASS (code) != '<')
4323 return 0;
4325 cc_mode = GET_MODE (XEXP (op, 0));
4326 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
4327 return 0;
4329 validate_condition_mode (code, cc_mode);
4331 return 1;
4334 /* Return 1 if OP is a comparison operation that is valid for a branch
4335 insn and which is true if the corresponding bit in the CC register
4336 is set. */
4339 branch_positive_comparison_operator (op, mode)
4340 rtx op;
4341 enum machine_mode mode;
4343 enum rtx_code code;
4345 if (! branch_comparison_operator (op, mode))
4346 return 0;
4348 code = GET_CODE (op);
4349 return (code == EQ || code == LT || code == GT
4350 || code == LTU || code == GTU
4351 || code == UNORDERED);
4354 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
4355 We check the opcode against the mode of the CC value and disallow EQ or
4356 NE comparisons for integers. */
4359 scc_comparison_operator (op, mode)
4360 rtx op;
4361 enum machine_mode mode;
4363 enum rtx_code code = GET_CODE (op);
4364 enum machine_mode cc_mode;
4366 if (GET_MODE (op) != mode && mode != VOIDmode)
4367 return 0;
4369 if (GET_RTX_CLASS (code) != '<')
4370 return 0;
4372 cc_mode = GET_MODE (XEXP (op, 0));
4373 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
4374 return 0;
4376 validate_condition_mode (code, cc_mode);
4378 if (code == NE && cc_mode != CCFPmode)
4379 return 0;
4381 return 1;
4385 trap_comparison_operator (op, mode)
4386 rtx op;
4387 enum machine_mode mode;
4389 if (mode != VOIDmode && mode != GET_MODE (op))
4390 return 0;
4391 return GET_RTX_CLASS (GET_CODE (op)) == '<';
4395 boolean_operator (op, mode)
4396 rtx op;
4397 enum machine_mode mode ATTRIBUTE_UNUSED;
4399 enum rtx_code code = GET_CODE (op);
4400 return (code == AND || code == IOR || code == XOR);
4404 boolean_or_operator (op, mode)
4405 rtx op;
4406 enum machine_mode mode ATTRIBUTE_UNUSED;
4408 enum rtx_code code = GET_CODE (op);
4409 return (code == IOR || code == XOR);
4413 min_max_operator (op, mode)
4414 rtx op;
4415 enum machine_mode mode ATTRIBUTE_UNUSED;
4417 enum rtx_code code = GET_CODE (op);
4418 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
4421 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
4422 mask required to convert the result of a rotate insn into a shift
4423 left insn of SHIFTOP bits. Both are known to be CONST_INT. */
4426 includes_lshift_p (shiftop, andop)
4427 rtx shiftop;
4428 rtx andop;
4430 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
4432 shift_mask <<= INTVAL (shiftop);
4434 return (INTVAL (andop) & ~shift_mask) == 0;
4437 /* Similar, but for right shift. */
4440 includes_rshift_p (shiftop, andop)
4441 rtx shiftop;
4442 rtx andop;
4444 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
4446 shift_mask >>= INTVAL (shiftop);
4448 return (INTVAL (andop) & ~shift_mask) == 0;
4451 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
4452 to perform a left shift. It must have exactly SHIFTOP least
4453 signifigant 0's, then one or more 1's, then zero or more 0's. */
4456 includes_rldic_lshift_p (shiftop, andop)
4457 rtx shiftop;
4458 rtx andop;
4460 if (GET_CODE (andop) == CONST_INT)
4462 HOST_WIDE_INT c, lsb, shift_mask;
4464 c = INTVAL (andop);
4465 if (c == 0 || c == ~0)
4466 return 0;
4468 shift_mask = ~0;
4469 shift_mask <<= INTVAL (shiftop);
4471 /* Find the least signifigant one bit. */
4472 lsb = c & -c;
4474 /* It must coincide with the LSB of the shift mask. */
4475 if (-lsb != shift_mask)
4476 return 0;
4478 /* Invert to look for the next transition (if any). */
4479 c = ~c;
4481 /* Remove the low group of ones (originally low group of zeros). */
4482 c &= -lsb;
4484 /* Again find the lsb, and check we have all 1's above. */
4485 lsb = c & -c;
4486 return c == -lsb;
4488 else if (GET_CODE (andop) == CONST_DOUBLE
4489 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
4491 HOST_WIDE_INT low, high, lsb;
4492 HOST_WIDE_INT shift_mask_low, shift_mask_high;
4494 low = CONST_DOUBLE_LOW (andop);
4495 if (HOST_BITS_PER_WIDE_INT < 64)
4496 high = CONST_DOUBLE_HIGH (andop);
4498 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
4499 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
4500 return 0;
4502 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
4504 shift_mask_high = ~0;
4505 if (INTVAL (shiftop) > 32)
4506 shift_mask_high <<= INTVAL (shiftop) - 32;
4508 lsb = high & -high;
4510 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
4511 return 0;
4513 high = ~high;
4514 high &= -lsb;
4516 lsb = high & -high;
4517 return high == -lsb;
4520 shift_mask_low = ~0;
4521 shift_mask_low <<= INTVAL (shiftop);
4523 lsb = low & -low;
4525 if (-lsb != shift_mask_low)
4526 return 0;
4528 if (HOST_BITS_PER_WIDE_INT < 64)
4529 high = ~high;
4530 low = ~low;
4531 low &= -lsb;
4533 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
4535 lsb = high & -high;
4536 return high == -lsb;
4539 lsb = low & -low;
4540 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
4542 else
4543 return 0;
4546 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
4547 to perform a left shift. It must have SHIFTOP or more least
4548 signifigant 0's, with the remainder of the word 1's. */
4551 includes_rldicr_lshift_p (shiftop, andop)
4552 rtx shiftop;
4553 rtx andop;
4555 if (GET_CODE (andop) == CONST_INT)
4557 HOST_WIDE_INT c, lsb, shift_mask;
4559 shift_mask = ~0;
4560 shift_mask <<= INTVAL (shiftop);
4561 c = INTVAL (andop);
4563 /* Find the least signifigant one bit. */
4564 lsb = c & -c;
4566 /* It must be covered by the shift mask.
4567 This test also rejects c == 0. */
4568 if ((lsb & shift_mask) == 0)
4569 return 0;
4571 /* Check we have all 1's above the transition, and reject all 1's. */
4572 return c == -lsb && lsb != 1;
4574 else if (GET_CODE (andop) == CONST_DOUBLE
4575 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
4577 HOST_WIDE_INT low, lsb, shift_mask_low;
4579 low = CONST_DOUBLE_LOW (andop);
4581 if (HOST_BITS_PER_WIDE_INT < 64)
4583 HOST_WIDE_INT high, shift_mask_high;
4585 high = CONST_DOUBLE_HIGH (andop);
4587 if (low == 0)
4589 shift_mask_high = ~0;
4590 if (INTVAL (shiftop) > 32)
4591 shift_mask_high <<= INTVAL (shiftop) - 32;
4593 lsb = high & -high;
4595 if ((lsb & shift_mask_high) == 0)
4596 return 0;
4598 return high == -lsb;
4600 if (high != ~0)
4601 return 0;
4604 shift_mask_low = ~0;
4605 shift_mask_low <<= INTVAL (shiftop);
4607 lsb = low & -low;
4609 if ((lsb & shift_mask_low) == 0)
4610 return 0;
4612 return low == -lsb && lsb != 1;
4614 else
4615 return 0;
4618 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
4619 for lfq and stfq insns.
4621 Note reg1 and reg2 *must* be hard registers. To be sure we will
4622 abort if we are passed pseudo registers. */
4625 registers_ok_for_quad_peep (reg1, reg2)
4626 rtx reg1, reg2;
4628 /* We might have been passed a SUBREG. */
4629 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
4630 return 0;
4632 return (REGNO (reg1) == REGNO (reg2) - 1);
4635 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
4636 addr1 and addr2 must be in consecutive memory locations
4637 (addr2 == addr1 + 8). */
4640 addrs_ok_for_quad_peep (addr1, addr2)
4641 rtx addr1;
4642 rtx addr2;
4644 unsigned int reg1;
4645 int offset1;
4647 /* Extract an offset (if used) from the first addr. */
4648 if (GET_CODE (addr1) == PLUS)
4650 /* If not a REG, return zero. */
4651 if (GET_CODE (XEXP (addr1, 0)) != REG)
4652 return 0;
4653 else
4655 reg1 = REGNO (XEXP (addr1, 0));
4656 /* The offset must be constant! */
4657 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
4658 return 0;
4659 offset1 = INTVAL (XEXP (addr1, 1));
4662 else if (GET_CODE (addr1) != REG)
4663 return 0;
4664 else
4666 reg1 = REGNO (addr1);
4667 /* This was a simple (mem (reg)) expression. Offset is 0. */
4668 offset1 = 0;
4671 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
4672 if (GET_CODE (addr2) != PLUS)
4673 return 0;
4675 if (GET_CODE (XEXP (addr2, 0)) != REG
4676 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
4677 return 0;
4679 if (reg1 != REGNO (XEXP (addr2, 0)))
4680 return 0;
4682 /* The offset for the second addr must be 8 more than the first addr. */
4683 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
4684 return 0;
4686 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
4687 instructions. */
4688 return 1;
4691 /* Return the register class of a scratch register needed to copy IN into
4692 or out of a register in CLASS in MODE. If it can be done directly,
4693 NO_REGS is returned. */
4695 enum reg_class
4696 secondary_reload_class (class, mode, in)
4697 enum reg_class class;
4698 enum machine_mode mode ATTRIBUTE_UNUSED;
4699 rtx in;
4701 int regno;
4703 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
4705 /* We cannot copy a symbolic operand directly into anything
4706 other than BASE_REGS for TARGET_ELF. So indicate that a
4707 register from BASE_REGS is needed as an intermediate
4708 register.
4710 On Darwin, pic addresses require a load from memory, which
4711 needs a base register. */
4712 if (class != BASE_REGS
4713 && (GET_CODE (in) == SYMBOL_REF
4714 || GET_CODE (in) == HIGH
4715 || GET_CODE (in) == LABEL_REF
4716 || GET_CODE (in) == CONST))
4717 return BASE_REGS;
4720 if (GET_CODE (in) == REG)
4722 regno = REGNO (in);
4723 if (regno >= FIRST_PSEUDO_REGISTER)
4725 regno = true_regnum (in);
4726 if (regno >= FIRST_PSEUDO_REGISTER)
4727 regno = -1;
4730 else if (GET_CODE (in) == SUBREG)
4732 regno = true_regnum (in);
4733 if (regno >= FIRST_PSEUDO_REGISTER)
4734 regno = -1;
4736 else
4737 regno = -1;
4739 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
4740 into anything. */
4741 if (class == GENERAL_REGS || class == BASE_REGS
4742 || (regno >= 0 && INT_REGNO_P (regno)))
4743 return NO_REGS;
4745 /* Constants, memory, and FP registers can go into FP registers. */
4746 if ((regno == -1 || FP_REGNO_P (regno))
4747 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
4748 return NO_REGS;
4750 /* Memory, and AltiVec registers can go into AltiVec registers. */
4751 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
4752 && class == ALTIVEC_REGS)
4753 return NO_REGS;
4755 /* We can copy among the CR registers. */
4756 if ((class == CR_REGS || class == CR0_REGS)
4757 && regno >= 0 && CR_REGNO_P (regno))
4758 return NO_REGS;
4760 /* Otherwise, we need GENERAL_REGS. */
4761 return GENERAL_REGS;
4764 /* Given a comparison operation, return the bit number in CCR to test. We
4765 know this is a valid comparison.
4767 SCC_P is 1 if this is for an scc. That means that %D will have been
4768 used instead of %C, so the bits will be in different places.
4770 Return -1 if OP isn't a valid comparison for some reason. */
4773 ccr_bit (op, scc_p)
4774 rtx op;
4775 int scc_p;
4777 enum rtx_code code = GET_CODE (op);
4778 enum machine_mode cc_mode;
4779 int cc_regnum;
4780 int base_bit;
4781 rtx reg;
4783 if (GET_RTX_CLASS (code) != '<')
4784 return -1;
4786 reg = XEXP (op, 0);
4788 if (GET_CODE (reg) != REG
4789 || ! CR_REGNO_P (REGNO (reg)))
4790 abort ();
4792 cc_mode = GET_MODE (reg);
4793 cc_regnum = REGNO (reg);
4794 base_bit = 4 * (cc_regnum - CR0_REGNO);
4796 validate_condition_mode (code, cc_mode);
4798 switch (code)
4800 case NE:
4801 return scc_p ? base_bit + 3 : base_bit + 2;
4802 case EQ:
4803 return base_bit + 2;
4804 case GT: case GTU: case UNLE:
4805 return base_bit + 1;
4806 case LT: case LTU: case UNGE:
4807 return base_bit;
4808 case ORDERED: case UNORDERED:
4809 return base_bit + 3;
4811 case GE: case GEU:
4812 /* If scc, we will have done a cror to put the bit in the
4813 unordered position. So test that bit. For integer, this is ! LT
4814 unless this is an scc insn. */
4815 return scc_p ? base_bit + 3 : base_bit;
4817 case LE: case LEU:
4818 return scc_p ? base_bit + 3 : base_bit + 1;
4820 default:
4821 abort ();
4825 /* Return the GOT register. */
4827 struct rtx_def *
4828 rs6000_got_register (value)
4829 rtx value ATTRIBUTE_UNUSED;
4831 /* The second flow pass currently (June 1999) can't update
4832 regs_ever_live without disturbing other parts of the compiler, so
4833 update it here to make the prolog/epilogue code happy. */
4834 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
4835 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
4837 current_function_uses_pic_offset_table = 1;
4839 return pic_offset_table_rtx;
4842 /* Functions to init, mark and free struct machine_function.
4843 These will be called, via pointer variables,
4844 from push_function_context and pop_function_context. */
4846 static void
4847 rs6000_init_machine_status (p)
4848 struct function *p;
4850 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
4853 static void
4854 rs6000_free_machine_status (p)
4855 struct function *p;
4857 if (p->machine == NULL)
4858 return;
4860 free (p->machine);
4861 p->machine = NULL;
4865 /* Print an operand. Recognize special options, documented below. */
4867 #if TARGET_ELF
4868 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
4869 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
4870 #else
4871 #define SMALL_DATA_RELOC "sda21"
4872 #define SMALL_DATA_REG 0
4873 #endif
4875 void
4876 print_operand (file, x, code)
4877 FILE *file;
4878 rtx x;
4879 int code;
4881 int i;
4882 HOST_WIDE_INT val;
4884 /* These macros test for integers and extract the low-order bits. */
4885 #define INT_P(X) \
4886 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
4887 && GET_MODE (X) == VOIDmode)
4889 #define INT_LOWPART(X) \
4890 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
4892 switch (code)
4894 case '.':
4895 /* Write out an instruction after the call which may be replaced
4896 with glue code by the loader. This depends on the AIX version. */
4897 asm_fprintf (file, RS6000_CALL_GLUE);
4898 return;
4900 /* %a is output_address. */
4902 case 'A':
4903 /* If X is a constant integer whose low-order 5 bits are zero,
4904 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
4905 in the AIX assembler where "sri" with a zero shift count
4906 write a trash instruction. */
4907 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
4908 putc ('l', file);
4909 else
4910 putc ('r', file);
4911 return;
4913 case 'b':
4914 /* If constant, low-order 16 bits of constant, unsigned.
4915 Otherwise, write normally. */
4916 if (INT_P (x))
4917 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
4918 else
4919 print_operand (file, x, 0);
4920 return;
4922 case 'B':
4923 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
4924 for 64-bit mask direction. */
4925 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
4926 return;
4928 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
4929 output_operand. */
4931 case 'D':
4932 /* There used to be a comment for 'C' reading "This is an
4933 optional cror needed for certain floating-point
4934 comparisons. Otherwise write nothing." */
4936 /* Similar, except that this is for an scc, so we must be able to
4937 encode the test in a single bit that is one. We do the above
4938 for any LE, GE, GEU, or LEU and invert the bit for NE. */
4939 if (GET_CODE (x) == LE || GET_CODE (x) == GE
4940 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
4942 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
4944 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
4945 base_bit + 2,
4946 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
4949 else if (GET_CODE (x) == NE)
4951 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
4953 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
4954 base_bit + 2, base_bit + 2);
4956 return;
4958 case 'E':
4959 /* X is a CR register. Print the number of the EQ bit of the CR */
4960 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
4961 output_operand_lossage ("invalid %%E value");
4962 else
4963 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
4964 return;
4966 case 'f':
4967 /* X is a CR register. Print the shift count needed to move it
4968 to the high-order four bits. */
4969 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
4970 output_operand_lossage ("invalid %%f value");
4971 else
4972 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
4973 return;
4975 case 'F':
4976 /* Similar, but print the count for the rotate in the opposite
4977 direction. */
4978 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
4979 output_operand_lossage ("invalid %%F value");
4980 else
4981 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
4982 return;
4984 case 'G':
4985 /* X is a constant integer. If it is negative, print "m",
4986 otherwise print "z". This is to make a aze or ame insn. */
4987 if (GET_CODE (x) != CONST_INT)
4988 output_operand_lossage ("invalid %%G value");
4989 else if (INTVAL (x) >= 0)
4990 putc ('z', file);
4991 else
4992 putc ('m', file);
4993 return;
4995 case 'h':
4996 /* If constant, output low-order five bits. Otherwise, write
4997 normally. */
4998 if (INT_P (x))
4999 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
5000 else
5001 print_operand (file, x, 0);
5002 return;
5004 case 'H':
5005 /* If constant, output low-order six bits. Otherwise, write
5006 normally. */
5007 if (INT_P (x))
5008 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
5009 else
5010 print_operand (file, x, 0);
5011 return;
5013 case 'I':
5014 /* Print `i' if this is a constant, else nothing. */
5015 if (INT_P (x))
5016 putc ('i', file);
5017 return;
5019 case 'j':
5020 /* Write the bit number in CCR for jump. */
5021 i = ccr_bit (x, 0);
5022 if (i == -1)
5023 output_operand_lossage ("invalid %%j code");
5024 else
5025 fprintf (file, "%d", i);
5026 return;
5028 case 'J':
5029 /* Similar, but add one for shift count in rlinm for scc and pass
5030 scc flag to `ccr_bit'. */
5031 i = ccr_bit (x, 1);
5032 if (i == -1)
5033 output_operand_lossage ("invalid %%J code");
5034 else
5035 /* If we want bit 31, write a shift count of zero, not 32. */
5036 fprintf (file, "%d", i == 31 ? 0 : i + 1);
5037 return;
5039 case 'k':
5040 /* X must be a constant. Write the 1's complement of the
5041 constant. */
5042 if (! INT_P (x))
5043 output_operand_lossage ("invalid %%k value");
5044 else
5045 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
5046 return;
5048 case 'K':
5049 /* X must be a symbolic constant on ELF. Write an
5050 expression suitable for an 'addi' that adds in the low 16
5051 bits of the MEM. */
5052 if (GET_CODE (x) != CONST)
5054 print_operand_address (file, x);
5055 fputs ("@l", file);
5057 else
5059 if (GET_CODE (XEXP (x, 0)) != PLUS
5060 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
5061 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
5062 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
5063 output_operand_lossage ("invalid %%K value");
5064 print_operand_address (file, XEXP (XEXP (x, 0), 0));
5065 fputs ("@l", file);
5066 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
5068 return;
5070 /* %l is output_asm_label. */
5072 case 'L':
5073 /* Write second word of DImode or DFmode reference. Works on register
5074 or non-indexed memory only. */
5075 if (GET_CODE (x) == REG)
5076 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
5077 else if (GET_CODE (x) == MEM)
5079 /* Handle possible auto-increment. Since it is pre-increment and
5080 we have already done it, we can just use an offset of word. */
5081 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5082 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5083 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
5084 UNITS_PER_WORD));
5085 else
5086 output_address (XEXP (adjust_address_nv (x, SImode,
5087 UNITS_PER_WORD),
5088 0));
5090 if (small_data_operand (x, GET_MODE (x)))
5091 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5092 reg_names[SMALL_DATA_REG]);
5094 return;
5096 case 'm':
5097 /* MB value for a mask operand. */
5098 if (! mask_operand (x, VOIDmode))
5099 output_operand_lossage ("invalid %%m value");
5101 val = INT_LOWPART (x);
5103 /* If the high bit is set and the low bit is not, the value is zero.
5104 If the high bit is zero, the value is the first 1 bit we find from
5105 the left. */
5106 if ((val & 0x80000000) && ((val & 1) == 0))
5108 putc ('0', file);
5109 return;
5111 else if ((val & 0x80000000) == 0)
5113 for (i = 1; i < 32; i++)
5114 if ((val <<= 1) & 0x80000000)
5115 break;
5116 fprintf (file, "%d", i);
5117 return;
5120 /* Otherwise, look for the first 0 bit from the right. The result is its
5121 number plus 1. We know the low-order bit is one. */
5122 for (i = 0; i < 32; i++)
5123 if (((val >>= 1) & 1) == 0)
5124 break;
5126 /* If we ended in ...01, i would be 0. The correct value is 31, so
5127 we want 31 - i. */
5128 fprintf (file, "%d", 31 - i);
5129 return;
5131 case 'M':
5132 /* ME value for a mask operand. */
5133 if (! mask_operand (x, VOIDmode))
5134 output_operand_lossage ("invalid %%M value");
5136 val = INT_LOWPART (x);
5138 /* If the low bit is set and the high bit is not, the value is 31.
5139 If the low bit is zero, the value is the first 1 bit we find from
5140 the right. */
5141 if ((val & 1) && ((val & 0x80000000) == 0))
5143 fputs ("31", file);
5144 return;
5146 else if ((val & 1) == 0)
5148 for (i = 0; i < 32; i++)
5149 if ((val >>= 1) & 1)
5150 break;
5152 /* If we had ....10, i would be 0. The result should be
5153 30, so we need 30 - i. */
5154 fprintf (file, "%d", 30 - i);
5155 return;
5158 /* Otherwise, look for the first 0 bit from the left. The result is its
5159 number minus 1. We know the high-order bit is one. */
5160 for (i = 0; i < 32; i++)
5161 if (((val <<= 1) & 0x80000000) == 0)
5162 break;
5164 fprintf (file, "%d", i);
5165 return;
5167 /* %n outputs the negative of its operand. */
5169 case 'N':
5170 /* Write the number of elements in the vector times 4. */
5171 if (GET_CODE (x) != PARALLEL)
5172 output_operand_lossage ("invalid %%N value");
5173 else
5174 fprintf (file, "%d", XVECLEN (x, 0) * 4);
5175 return;
5177 case 'O':
5178 /* Similar, but subtract 1 first. */
5179 if (GET_CODE (x) != PARALLEL)
5180 output_operand_lossage ("invalid %%O value");
5181 else
5182 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
5183 return;
5185 case 'p':
5186 /* X is a CONST_INT that is a power of two. Output the logarithm. */
5187 if (! INT_P (x)
5188 || INT_LOWPART (x) < 0
5189 || (i = exact_log2 (INT_LOWPART (x))) < 0)
5190 output_operand_lossage ("invalid %%p value");
5191 else
5192 fprintf (file, "%d", i);
5193 return;
5195 case 'P':
5196 /* The operand must be an indirect memory reference. The result
5197 is the register number. */
5198 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
5199 || REGNO (XEXP (x, 0)) >= 32)
5200 output_operand_lossage ("invalid %%P value");
5201 else
5202 fprintf (file, "%d", REGNO (XEXP (x, 0)));
5203 return;
5205 case 'q':
5206 /* This outputs the logical code corresponding to a boolean
5207 expression. The expression may have one or both operands
5208 negated (if one, only the first one). For condition register
5209 logical operations, it will also treat the negated
5210 CR codes as NOTs, but not handle NOTs of them. */
5212 const char *const *t = 0;
5213 const char *s;
5214 enum rtx_code code = GET_CODE (x);
5215 static const char * const tbl[3][3] = {
5216 { "and", "andc", "nor" },
5217 { "or", "orc", "nand" },
5218 { "xor", "eqv", "xor" } };
5220 if (code == AND)
5221 t = tbl[0];
5222 else if (code == IOR)
5223 t = tbl[1];
5224 else if (code == XOR)
5225 t = tbl[2];
5226 else
5227 output_operand_lossage ("invalid %%q value");
5229 if (GET_CODE (XEXP (x, 0)) != NOT)
5230 s = t[0];
5231 else
5233 if (GET_CODE (XEXP (x, 1)) == NOT)
5234 s = t[2];
5235 else
5236 s = t[1];
5239 fputs (s, file);
5241 return;
5243 case 'R':
5244 /* X is a CR register. Print the mask for `mtcrf'. */
5245 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5246 output_operand_lossage ("invalid %%R value");
5247 else
5248 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
5249 return;
5251 case 's':
5252 /* Low 5 bits of 32 - value */
5253 if (! INT_P (x))
5254 output_operand_lossage ("invalid %%s value");
5255 else
5256 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
5257 return;
5259 case 'S':
5260 /* PowerPC64 mask position. All 0's and all 1's are excluded.
5261 CONST_INT 32-bit mask is considered sign-extended so any
5262 transition must occur within the CONST_INT, not on the boundary. */
5263 if (! mask64_operand (x, VOIDmode))
5264 output_operand_lossage ("invalid %%S value");
5266 val = INT_LOWPART (x);
5268 if (val & 1) /* Clear Left */
5270 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5271 if (!((val >>= 1) & 1))
5272 break;
5274 #if HOST_BITS_PER_WIDE_INT == 32
5275 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5277 val = CONST_DOUBLE_HIGH (x);
5279 if (val == 0)
5280 --i;
5281 else
5282 for (i = 32; i < 64; i++)
5283 if (!((val >>= 1) & 1))
5284 break;
5286 #endif
5287 /* i = index of last set bit from right
5288 mask begins at 63 - i from left */
5289 if (i > 63)
5290 output_operand_lossage ("%%S computed all 1's mask");
5292 fprintf (file, "%d", 63 - i);
5293 return;
5295 else /* Clear Right */
5297 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5298 if ((val >>= 1) & 1)
5299 break;
5301 #if HOST_BITS_PER_WIDE_INT == 32
5302 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5304 val = CONST_DOUBLE_HIGH (x);
5306 if (val == (HOST_WIDE_INT) -1)
5307 --i;
5308 else
5309 for (i = 32; i < 64; i++)
5310 if ((val >>= 1) & 1)
5311 break;
5313 #endif
5314 /* i = index of last clear bit from right
5315 mask ends at 62 - i from left */
5316 if (i > 62)
5317 output_operand_lossage ("%%S computed all 0's mask");
5319 fprintf (file, "%d", 62 - i);
5320 return;
5323 case 'T':
5324 /* Print the symbolic name of a branch target register. */
5325 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
5326 && REGNO (x) != COUNT_REGISTER_REGNUM))
5327 output_operand_lossage ("invalid %%T value");
5328 else if (REGNO (x) == LINK_REGISTER_REGNUM)
5329 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
5330 else
5331 fputs ("ctr", file);
5332 return;
5334 case 'u':
5335 /* High-order 16 bits of constant for use in unsigned operand. */
5336 if (! INT_P (x))
5337 output_operand_lossage ("invalid %%u value");
5338 else
5339 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
5340 (INT_LOWPART (x) >> 16) & 0xffff);
5341 return;
5343 case 'v':
5344 /* High-order 16 bits of constant for use in signed operand. */
5345 if (! INT_P (x))
5346 output_operand_lossage ("invalid %%v value");
5347 else
5348 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
5349 (INT_LOWPART (x) >> 16) & 0xffff);
5350 return;
5352 case 'U':
5353 /* Print `u' if this has an auto-increment or auto-decrement. */
5354 if (GET_CODE (x) == MEM
5355 && (GET_CODE (XEXP (x, 0)) == PRE_INC
5356 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
5357 putc ('u', file);
5358 return;
5360 case 'V':
5361 /* Print the trap code for this operand. */
5362 switch (GET_CODE (x))
5364 case EQ:
5365 fputs ("eq", file); /* 4 */
5366 break;
5367 case NE:
5368 fputs ("ne", file); /* 24 */
5369 break;
5370 case LT:
5371 fputs ("lt", file); /* 16 */
5372 break;
5373 case LE:
5374 fputs ("le", file); /* 20 */
5375 break;
5376 case GT:
5377 fputs ("gt", file); /* 8 */
5378 break;
5379 case GE:
5380 fputs ("ge", file); /* 12 */
5381 break;
5382 case LTU:
5383 fputs ("llt", file); /* 2 */
5384 break;
5385 case LEU:
5386 fputs ("lle", file); /* 6 */
5387 break;
5388 case GTU:
5389 fputs ("lgt", file); /* 1 */
5390 break;
5391 case GEU:
5392 fputs ("lge", file); /* 5 */
5393 break;
5394 default:
5395 abort ();
5397 break;
5399 case 'w':
5400 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
5401 normally. */
5402 if (INT_P (x))
5403 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5404 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
5405 else
5406 print_operand (file, x, 0);
5407 return;
5409 case 'W':
5410 /* MB value for a PowerPC64 rldic operand. */
5411 val = (GET_CODE (x) == CONST_INT
5412 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
5414 if (val < 0)
5415 i = -1;
5416 else
5417 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5418 if ((val <<= 1) < 0)
5419 break;
5421 #if HOST_BITS_PER_WIDE_INT == 32
5422 if (GET_CODE (x) == CONST_INT && i >= 0)
5423 i += 32; /* zero-extend high-part was all 0's */
5424 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5426 val = CONST_DOUBLE_LOW (x);
5428 if (val == 0)
5429 abort ();
5430 else if (val < 0)
5431 --i;
5432 else
5433 for ( ; i < 64; i++)
5434 if ((val <<= 1) < 0)
5435 break;
5437 #endif
5439 fprintf (file, "%d", i + 1);
5440 return;
5442 case 'X':
5443 if (GET_CODE (x) == MEM
5444 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
5445 putc ('x', file);
5446 return;
5448 case 'Y':
5449 /* Like 'L', for third word of TImode */
5450 if (GET_CODE (x) == REG)
5451 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
5452 else if (GET_CODE (x) == MEM)
5454 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5455 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5456 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
5457 else
5458 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
5459 if (small_data_operand (x, GET_MODE (x)))
5460 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5461 reg_names[SMALL_DATA_REG]);
5463 return;
5465 case 'z':
5466 /* X is a SYMBOL_REF. Write out the name preceded by a
5467 period and without any trailing data in brackets. Used for function
5468 names. If we are configured for System V (or the embedded ABI) on
5469 the PowerPC, do not emit the period, since those systems do not use
5470 TOCs and the like. */
5471 if (GET_CODE (x) != SYMBOL_REF)
5472 abort ();
5474 if (XSTR (x, 0)[0] != '.')
5476 switch (DEFAULT_ABI)
5478 default:
5479 abort ();
5481 case ABI_AIX:
5482 putc ('.', file);
5483 break;
5485 case ABI_V4:
5486 case ABI_AIX_NODESC:
5487 case ABI_DARWIN:
5488 break;
5491 #if TARGET_AIX
5492 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
5493 #else
5494 assemble_name (file, XSTR (x, 0));
5495 #endif
5496 return;
5498 case 'Z':
5499 /* Like 'L', for last word of TImode. */
5500 if (GET_CODE (x) == REG)
5501 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
5502 else if (GET_CODE (x) == MEM)
5504 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5505 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5506 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
5507 else
5508 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
5509 if (small_data_operand (x, GET_MODE (x)))
5510 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5511 reg_names[SMALL_DATA_REG]);
5513 return;
5515 /* Print AltiVec memory operand. */
5516 case 'y':
5518 rtx tmp;
5520 if (GET_CODE (x) != MEM)
5521 abort ();
5523 tmp = XEXP (x, 0);
5525 if (GET_CODE (tmp) == REG)
5526 fprintf (file, "0, %s", reg_names[REGNO (tmp)]);
5527 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
5529 if (REGNO (XEXP (tmp, 0)) == 0)
5530 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
5531 reg_names[ REGNO (XEXP (tmp, 0)) ]);
5532 else
5533 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
5534 reg_names[ REGNO (XEXP (tmp, 1)) ]);
5536 else
5537 abort ();
5538 break;
5541 case 0:
5542 if (GET_CODE (x) == REG)
5543 fprintf (file, "%s", reg_names[REGNO (x)]);
5544 else if (GET_CODE (x) == MEM)
5546 /* We need to handle PRE_INC and PRE_DEC here, since we need to
5547 know the width from the mode. */
5548 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
5549 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
5550 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
5551 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
5552 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
5553 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
5554 else
5555 output_address (XEXP (x, 0));
5557 else
5558 output_addr_const (file, x);
5559 return;
5561 default:
5562 output_operand_lossage ("invalid %%xn code");
5566 /* Print the address of an operand. */
5568 void
5569 print_operand_address (file, x)
5570 FILE *file;
5571 rtx x;
5573 if (GET_CODE (x) == REG)
5574 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
5575 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
5576 || GET_CODE (x) == LABEL_REF)
5578 output_addr_const (file, x);
5579 if (small_data_operand (x, GET_MODE (x)))
5580 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5581 reg_names[SMALL_DATA_REG]);
5582 else if (TARGET_TOC)
5583 abort ();
5585 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
5587 if (REGNO (XEXP (x, 0)) == 0)
5588 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
5589 reg_names[ REGNO (XEXP (x, 0)) ]);
5590 else
5591 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
5592 reg_names[ REGNO (XEXP (x, 1)) ]);
5594 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
5596 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
5597 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
5599 #if TARGET_ELF
5600 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
5601 && CONSTANT_P (XEXP (x, 1)))
5603 output_addr_const (file, XEXP (x, 1));
5604 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
5606 #endif
5607 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
5609 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
5611 rtx contains_minus = XEXP (x, 1);
5612 rtx minus, symref;
5613 const char *name;
5615 /* Find the (minus (sym) (toc)) buried in X, and temporarily
5616 turn it into (sym) for output_addr_const. */
5617 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
5618 contains_minus = XEXP (contains_minus, 0);
5620 minus = XEXP (contains_minus, 0);
5621 symref = XEXP (minus, 0);
5622 XEXP (contains_minus, 0) = symref;
5623 if (TARGET_ELF)
5625 char *newname;
5627 name = XSTR (symref, 0);
5628 newname = alloca (strlen (name) + sizeof ("@toc"));
5629 strcpy (newname, name);
5630 strcat (newname, "@toc");
5631 XSTR (symref, 0) = newname;
5633 output_addr_const (file, XEXP (x, 1));
5634 if (TARGET_ELF)
5635 XSTR (symref, 0) = name;
5636 XEXP (contains_minus, 0) = minus;
5638 else
5639 output_addr_const (file, XEXP (x, 1));
5641 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
5643 else
5644 abort ();
5647 enum rtx_code
5648 rs6000_reverse_condition (mode, code)
5649 enum machine_mode mode;
5650 enum rtx_code code;
5652 /* Reversal of FP compares takes care -- an ordered compare
5653 becomes an unordered compare and vice versa. */
5654 if (mode == CCFPmode)
5655 return reverse_condition_maybe_unordered (code);
5656 else
5657 return reverse_condition (code);
5660 /* Generate a compare for CODE. Return a brand-new rtx that
5661 represents the result of the compare. */
5663 static rtx
5664 rs6000_generate_compare (code)
5665 enum rtx_code code;
5667 enum machine_mode comp_mode;
5668 rtx compare_result;
5670 if (rs6000_compare_fp_p)
5671 comp_mode = CCFPmode;
5672 else if (code == GTU || code == LTU
5673 || code == GEU || code == LEU)
5674 comp_mode = CCUNSmode;
5675 else
5676 comp_mode = CCmode;
5678 /* First, the compare. */
5679 compare_result = gen_reg_rtx (comp_mode);
5680 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
5681 gen_rtx_COMPARE (comp_mode,
5682 rs6000_compare_op0,
5683 rs6000_compare_op1)));
5685 /* Some kinds of FP comparisons need an OR operation;
5686 except for flag_unsafe_math_optimizations we don't bother. */
5687 if (rs6000_compare_fp_p
5688 && ! flag_unsafe_math_optimizations
5689 && (code == LE || code == GE
5690 || code == UNEQ || code == LTGT
5691 || code == UNGT || code == UNLT))
5693 enum rtx_code or1, or2;
5694 rtx or1_rtx, or2_rtx, compare2_rtx;
5695 rtx or_result = gen_reg_rtx (CCEQmode);
5697 switch (code)
5699 case LE: or1 = LT; or2 = EQ; break;
5700 case GE: or1 = GT; or2 = EQ; break;
5701 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
5702 case LTGT: or1 = LT; or2 = GT; break;
5703 case UNGT: or1 = UNORDERED; or2 = GT; break;
5704 case UNLT: or1 = UNORDERED; or2 = LT; break;
5705 default: abort ();
5707 validate_condition_mode (or1, comp_mode);
5708 validate_condition_mode (or2, comp_mode);
5709 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
5710 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
5711 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
5712 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
5713 const_true_rtx);
5714 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
5716 compare_result = or_result;
5717 code = EQ;
5720 validate_condition_mode (code, GET_MODE (compare_result));
5722 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
5726 /* Emit the RTL for an sCOND pattern. */
5728 void
5729 rs6000_emit_sCOND (code, result)
5730 enum rtx_code code;
5731 rtx result;
5733 rtx condition_rtx;
5734 enum machine_mode op_mode;
5736 condition_rtx = rs6000_generate_compare (code);
5738 op_mode = GET_MODE (rs6000_compare_op0);
5739 if (op_mode == VOIDmode)
5740 op_mode = GET_MODE (rs6000_compare_op1);
5742 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
5744 PUT_MODE (condition_rtx, DImode);
5745 convert_move (result, condition_rtx, 0);
5747 else
5749 PUT_MODE (condition_rtx, SImode);
5750 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
5754 /* Emit a branch of kind CODE to location LOC. */
5756 void
5757 rs6000_emit_cbranch (code, loc)
5758 enum rtx_code code;
5759 rtx loc;
5761 rtx condition_rtx, loc_ref;
5763 condition_rtx = rs6000_generate_compare (code);
5764 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
5765 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
5766 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
5767 loc_ref, pc_rtx)));
5770 /* Return the string to output a conditional branch to LABEL, which is
5771 the operand number of the label, or -1 if the branch is really a
5772 conditional return.
5774 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
5775 condition code register and its mode specifies what kind of
5776 comparison we made.
5778 REVERSED is non-zero if we should reverse the sense of the comparison.
5780 INSN is the insn. */
5782 char *
5783 output_cbranch (op, label, reversed, insn)
5784 rtx op;
5785 const char * label;
5786 int reversed;
5787 rtx insn;
5789 static char string[64];
5790 enum rtx_code code = GET_CODE (op);
5791 rtx cc_reg = XEXP (op, 0);
5792 enum machine_mode mode = GET_MODE (cc_reg);
5793 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
5794 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
5795 int really_reversed = reversed ^ need_longbranch;
5796 char *s = string;
5797 const char *ccode;
5798 const char *pred;
5799 rtx note;
5801 validate_condition_mode (code, mode);
5803 /* Work out which way this really branches. We could use
5804 reverse_condition_maybe_unordered here always but this
5805 makes the resulting assembler clearer. */
5806 if (really_reversed)
5807 code = rs6000_reverse_condition (mode, code);
5809 switch (code)
5811 /* Not all of these are actually distinct opcodes, but
5812 we distinguish them for clarity of the resulting assembler. */
5813 case NE: case LTGT:
5814 ccode = "ne"; break;
5815 case EQ: case UNEQ:
5816 ccode = "eq"; break;
5817 case GE: case GEU:
5818 ccode = "ge"; break;
5819 case GT: case GTU: case UNGT:
5820 ccode = "gt"; break;
5821 case LE: case LEU:
5822 ccode = "le"; break;
5823 case LT: case LTU: case UNLT:
5824 ccode = "lt"; break;
5825 case UNORDERED: ccode = "un"; break;
5826 case ORDERED: ccode = "nu"; break;
5827 case UNGE: ccode = "nl"; break;
5828 case UNLE: ccode = "ng"; break;
5829 default:
5830 abort ();
5833 /* Maybe we have a guess as to how likely the branch is.
5834 The old mnemonics don't have a way to specify this information. */
5835 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
5836 if (note != NULL_RTX)
5838 /* PROB is the difference from 50%. */
5839 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
5841 /* For branches that are very close to 50%, assume not-taken. */
5842 if (abs (prob) > REG_BR_PROB_BASE / 20
5843 && ((prob > 0) ^ need_longbranch))
5844 pred = "+";
5845 else
5846 pred = "-";
5848 else
5849 pred = "";
5851 if (label == NULL)
5852 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
5853 else
5854 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
5856 /* We need to escape any '%' characters in the reg_names string.
5857 Assume they'd only be the first character... */
5858 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
5859 *s++ = '%';
5860 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
5862 if (label != NULL)
5864 /* If the branch distance was too far, we may have to use an
5865 unconditional branch to go the distance. */
5866 if (need_longbranch)
5867 s += sprintf (s, ",$+8\n\tb %s", label);
5868 else
5869 s += sprintf (s, ",%s", label);
5872 return string;
5875 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
5876 operands of the last comparison is nonzero/true, FALSE_COND if it
5877 is zero/false. Return 0 if the hardware has no such operation. */
5880 rs6000_emit_cmove (dest, op, true_cond, false_cond)
5881 rtx dest;
5882 rtx op;
5883 rtx true_cond;
5884 rtx false_cond;
5886 enum rtx_code code = GET_CODE (op);
5887 rtx op0 = rs6000_compare_op0;
5888 rtx op1 = rs6000_compare_op1;
5889 REAL_VALUE_TYPE c1;
5890 enum machine_mode mode = GET_MODE (op0);
5891 rtx temp;
5893 /* First, work out if the hardware can do this at all, or
5894 if it's too slow... */
5895 /* If the comparison is an integer one, since we only have fsel
5896 it'll be cheaper to use a branch. */
5897 if (! rs6000_compare_fp_p)
5898 return 0;
5900 /* Eliminate half of the comparisons by switching operands, this
5901 makes the remaining code simpler. */
5902 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
5903 || code == LTGT || code == LT)
5905 code = reverse_condition_maybe_unordered (code);
5906 temp = true_cond;
5907 true_cond = false_cond;
5908 false_cond = temp;
5911 /* UNEQ and LTGT take four instructions for a comparison with zero,
5912 it'll probably be faster to use a branch here too. */
5913 if (code == UNEQ)
5914 return 0;
5916 if (GET_CODE (op1) == CONST_DOUBLE)
5917 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
5919 /* We're going to try to implement comparions by performing
5920 a subtract, then comparing against zero. Unfortunately,
5921 Inf - Inf is NaN which is not zero, and so if we don't
5922 know that the the operand is finite and the comparison
5923 would treat EQ different to UNORDERED, we can't do it. */
5924 if (! flag_unsafe_math_optimizations
5925 && code != GT && code != UNGE
5926 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
5927 /* Constructs of the form (a OP b ? a : b) are safe. */
5928 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
5929 || (! rtx_equal_p (op0, true_cond)
5930 && ! rtx_equal_p (op1, true_cond))))
5931 return 0;
5932 /* At this point we know we can use fsel. */
5934 /* Reduce the comparison to a comparison against zero. */
5935 temp = gen_reg_rtx (mode);
5936 emit_insn (gen_rtx_SET (VOIDmode, temp,
5937 gen_rtx_MINUS (mode, op0, op1)));
5938 op0 = temp;
5939 op1 = CONST0_RTX (mode);
5941 /* If we don't care about NaNs we can reduce some of the comparisons
5942 down to faster ones. */
5943 if (flag_unsafe_math_optimizations)
5944 switch (code)
5946 case GT:
5947 code = LE;
5948 temp = true_cond;
5949 true_cond = false_cond;
5950 false_cond = temp;
5951 break;
5952 case UNGE:
5953 code = GE;
5954 break;
5955 case UNEQ:
5956 code = EQ;
5957 break;
5958 default:
5959 break;
5962 /* Now, reduce everything down to a GE. */
5963 switch (code)
5965 case GE:
5966 break;
5968 case LE:
5969 temp = gen_reg_rtx (mode);
5970 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
5971 op0 = temp;
5972 break;
5974 case ORDERED:
5975 temp = gen_reg_rtx (mode);
5976 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
5977 op0 = temp;
5978 break;
5980 case EQ:
5981 temp = gen_reg_rtx (mode);
5982 emit_insn (gen_rtx_SET (VOIDmode, temp,
5983 gen_rtx_NEG (mode,
5984 gen_rtx_ABS (mode, op0))));
5985 op0 = temp;
5986 break;
5988 case UNGE:
5989 temp = gen_reg_rtx (mode);
5990 emit_insn (gen_rtx_SET (VOIDmode, temp,
5991 gen_rtx_IF_THEN_ELSE (mode,
5992 gen_rtx_GE (VOIDmode,
5993 op0, op1),
5994 true_cond, false_cond)));
5995 false_cond = temp;
5996 true_cond = false_cond;
5998 temp = gen_reg_rtx (mode);
5999 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6000 op0 = temp;
6001 break;
6003 case GT:
6004 temp = gen_reg_rtx (mode);
6005 emit_insn (gen_rtx_SET (VOIDmode, temp,
6006 gen_rtx_IF_THEN_ELSE (mode,
6007 gen_rtx_GE (VOIDmode,
6008 op0, op1),
6009 true_cond, false_cond)));
6010 true_cond = temp;
6011 false_cond = true_cond;
6013 temp = gen_reg_rtx (mode);
6014 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6015 op0 = temp;
6016 break;
6018 default:
6019 abort ();
6022 emit_insn (gen_rtx_SET (VOIDmode, dest,
6023 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
6024 gen_rtx_GE (VOIDmode,
6025 op0, op1),
6026 true_cond, false_cond)));
6027 return 1;
6030 void
6031 rs6000_emit_minmax (dest, code, op0, op1)
6032 rtx dest;
6033 enum rtx_code code;
6034 rtx op0;
6035 rtx op1;
6037 enum machine_mode mode = GET_MODE (op0);
6038 rtx target;
6039 if (code == SMAX || code == UMAX)
6040 target = emit_conditional_move (dest, GE, op0, op1, mode,
6041 op0, op1, mode, 0);
6042 else
6043 target = emit_conditional_move (dest, GE, op0, op1, mode,
6044 op1, op0, mode, 0);
6045 if (target == NULL_RTX)
6046 abort ();
6047 if (target != dest)
6048 emit_move_insn (dest, target);
6051 /* This page contains routines that are used to determine what the
6052 function prologue and epilogue code will do and write them out. */
6054 /* Return the first fixed-point register that is required to be
6055 saved. 32 if none. */
6058 first_reg_to_save ()
6060 int first_reg;
6062 /* Find lowest numbered live register. */
6063 for (first_reg = 13; first_reg <= 31; first_reg++)
6064 if (regs_ever_live[first_reg]
6065 && (! call_used_regs[first_reg]
6066 || (first_reg == PIC_OFFSET_TABLE_REGNUM
6067 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
6068 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
6069 break;
6071 if (profile_flag)
6073 /* AIX must save/restore every register that contains a parameter
6074 before/after the .__mcount call plus an additional register
6075 for the static chain, if needed; use registers from 30 down to 22
6076 to do this. */
6077 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
6079 int last_parm_reg, profile_first_reg;
6081 /* Figure out last used parameter register. The proper thing
6082 to do is to walk incoming args of the function. A function
6083 might have live parameter registers even if it has no
6084 incoming args. */
6085 for (last_parm_reg = 10;
6086 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
6087 last_parm_reg--)
6090 /* Calculate first reg for saving parameter registers
6091 and static chain.
6092 Skip reg 31 which may contain the frame pointer. */
6093 profile_first_reg = (33 - last_parm_reg
6094 - (current_function_needs_context ? 1 : 0));
6095 #if TARGET_MACHO
6096 /* Need to skip another reg to account for R31 being PICBASE
6097 (when flag_pic is set) or R30 being used as the frame
6098 pointer (when flag_pic is not set). */
6099 --profile_first_reg;
6100 #endif
6101 /* Do not save frame pointer if no parameters needs to be saved. */
6102 if (profile_first_reg == 31)
6103 profile_first_reg = 32;
6105 if (first_reg > profile_first_reg)
6106 first_reg = profile_first_reg;
6109 /* SVR4 may need one register to preserve the static chain. */
6110 else if (current_function_needs_context)
6112 /* Skip reg 31 which may contain the frame pointer. */
6113 if (first_reg > 30)
6114 first_reg = 30;
6118 #if TARGET_MACHO
6119 if (flag_pic && current_function_uses_pic_offset_table &&
6120 (first_reg > PIC_OFFSET_TABLE_REGNUM))
6121 return PIC_OFFSET_TABLE_REGNUM;
6122 #endif
6124 return first_reg;
6127 /* Similar, for FP regs. */
6130 first_fp_reg_to_save ()
6132 int first_reg;
6134 /* Find lowest numbered live register. */
6135 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6136 if (regs_ever_live[first_reg])
6137 break;
6139 return first_reg;
6142 /* Similar, for AltiVec regs. */
6144 static int
6145 first_altivec_reg_to_save ()
6147 int i;
6149 /* Stack frame remains as is unless we are in AltiVec ABI. */
6150 if (! TARGET_ALTIVEC_ABI)
6151 return LAST_ALTIVEC_REGNO + 1;
6153 /* Find lowest numbered live register. */
6154 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6155 if (regs_ever_live[i])
6156 break;
6158 return i;
6161 /* Return a 32-bit mask of the AltiVec registers we need to set in
6162 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
6163 the 32-bit word is 0. */
6165 static unsigned int
6166 compute_vrsave_mask ()
6168 unsigned int i, mask = 0;
6170 /* First, find out if we use _any_ altivec registers. */
6171 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6172 if (regs_ever_live[i])
6173 mask |= ALTIVEC_REG_BIT (i);
6175 if (mask == 0)
6176 return mask;
6178 /* Next, add all registers that are call-clobbered. We do this
6179 because post-reload register optimizers such as regrename_optimize
6180 may choose to use them. They never change the register class
6181 chosen by reload, so cannot create new uses of altivec registers
6182 if there were none before, so the early exit above is safe. */
6183 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
6184 altivec registers not saved in the mask, which might well make the
6185 adjustments below more effective in eliding the save/restore of
6186 VRSAVE in small functions. */
6187 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6188 if (call_used_regs[i])
6189 mask |= ALTIVEC_REG_BIT (i);
6191 /* Next, remove the argument registers from the set. These must
6192 be in the VRSAVE mask set by the caller, so we don't need to add
6193 them in again. More importantly, the mask we compute here is
6194 used to generate CLOBBERs in the set_vrsave insn, and we do not
6195 wish the argument registers to die. */
6196 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
6197 mask &= ~ALTIVEC_REG_BIT (i);
6199 /* Similarly, remove the return value from the set. */
6201 bool yes = false;
6202 diddle_return_value (is_altivec_return_reg, &yes);
6203 if (yes)
6204 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
6207 return mask;
6210 static void
6211 is_altivec_return_reg (reg, xyes)
6212 rtx reg;
6213 void *xyes;
6215 bool *yes = (bool *) xyes;
6216 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
6217 *yes = true;
6221 /* Calculate the stack information for the current function. This is
6222 complicated by having two separate calling sequences, the AIX calling
6223 sequence and the V.4 calling sequence.
6225 AIX (and Darwin/Mac OS X) stack frames look like:
6226 32-bit 64-bit
6227 SP----> +---------------------------------------+
6228 | back chain to caller | 0 0
6229 +---------------------------------------+
6230 | saved CR | 4 8 (8-11)
6231 +---------------------------------------+
6232 | saved LR | 8 16
6233 +---------------------------------------+
6234 | reserved for compilers | 12 24
6235 +---------------------------------------+
6236 | reserved for binders | 16 32
6237 +---------------------------------------+
6238 | saved TOC pointer | 20 40
6239 +---------------------------------------+
6240 | Parameter save area (P) | 24 48
6241 +---------------------------------------+
6242 | Alloca space (A) | 24+P etc.
6243 +---------------------------------------+
6244 | Local variable space (L) | 24+P+A
6245 +---------------------------------------+
6246 | Float/int conversion temporary (X) | 24+P+A+L
6247 +---------------------------------------+
6248 | Save area for AltiVec registers (W) | 24+P+A+L+X
6249 +---------------------------------------+
6250 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
6251 +---------------------------------------+
6252 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
6253 +---------------------------------------+
6254 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
6255 +---------------------------------------+
6256 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
6257 +---------------------------------------+
6258 old SP->| back chain to caller's caller |
6259 +---------------------------------------+
6261 The required alignment for AIX configurations is two words (i.e., 8
6262 or 16 bytes).
6265 V.4 stack frames look like:
6267 SP----> +---------------------------------------+
6268 | back chain to caller | 0
6269 +---------------------------------------+
6270 | caller's saved LR | 4
6271 +---------------------------------------+
6272 | Parameter save area (P) | 8
6273 +---------------------------------------+
6274 | Alloca space (A) | 8+P
6275 +---------------------------------------+
6276 | Varargs save area (V) | 8+P+A
6277 +---------------------------------------+
6278 | Local variable space (L) | 8+P+A+V
6279 +---------------------------------------+
6280 | Float/int conversion temporary (X) | 8+P+A+V+L
6281 +---------------------------------------+
6282 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
6283 +---------------------------------------+
6284 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
6285 +---------------------------------------+
6286 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
6287 +---------------------------------------+
6288 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
6289 +---------------------------------------+
6290 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
6291 +---------------------------------------+
6292 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
6293 +---------------------------------------+
6294 old SP->| back chain to caller's caller |
6295 +---------------------------------------+
6297 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
6298 given. (But note below and in sysv4.h that we require only 8 and
6299 may round up the size of our stack frame anyways. The historical
6300 reason is early versions of powerpc-linux which didn't properly
6301 align the stack at program startup. A happy side-effect is that
6302 -mno-eabi libraries can be used with -meabi programs.)
6304 The EABI configuration defaults to the V.4 layout, unless
6305 -mcall-aix is used, in which case the AIX layout is used. However,
6306 the stack alignment requirements may differ. If -mno-eabi is not
6307 given, the required stack alignment is 8 bytes; if -mno-eabi is
6308 given, the required alignment is 16 bytes. (But see V.4 comment
6309 above.) */
6311 #ifndef ABI_STACK_BOUNDARY
6312 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
6313 #endif
6315 rs6000_stack_t *
6316 rs6000_stack_info ()
6318 static rs6000_stack_t info, zero_info;
6319 rs6000_stack_t *info_ptr = &info;
6320 int reg_size = TARGET_POWERPC64 ? 8 : 4;
6321 enum rs6000_abi abi;
6322 int ehrd_size;
6323 int total_raw_size;
6325 /* Zero all fields portably. */
6326 info = zero_info;
6328 /* Select which calling sequence. */
6329 info_ptr->abi = abi = DEFAULT_ABI;
6331 /* Calculate which registers need to be saved & save area size. */
6332 info_ptr->first_gp_reg_save = first_reg_to_save ();
6333 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
6334 even if it currently looks like we won't. */
6335 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
6336 || (flag_pic == 1 && abi == ABI_V4)
6337 || (flag_pic && abi == ABI_DARWIN))
6338 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
6339 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
6340 else
6341 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
6343 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
6344 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
6346 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
6347 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
6348 - info_ptr->first_altivec_reg_save);
6350 /* Does this function call anything? */
6351 info_ptr->calls_p = (! current_function_is_leaf
6352 || cfun->machine->ra_needs_full_frame);
6354 /* Determine if we need to save the link register. */
6355 if (rs6000_ra_ever_killed ()
6356 || (DEFAULT_ABI == ABI_AIX && profile_flag)
6357 #ifdef TARGET_RELOCATABLE
6358 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
6359 #endif
6360 || (info_ptr->first_fp_reg_save != 64
6361 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
6362 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
6363 || (abi == ABI_V4 && current_function_calls_alloca)
6364 || (DEFAULT_ABI == ABI_DARWIN
6365 && flag_pic
6366 && current_function_uses_pic_offset_table)
6367 || info_ptr->calls_p)
6369 info_ptr->lr_save_p = 1;
6370 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
6373 /* Determine if we need to save the condition code registers. */
6374 if (regs_ever_live[CR2_REGNO]
6375 || regs_ever_live[CR3_REGNO]
6376 || regs_ever_live[CR4_REGNO])
6378 info_ptr->cr_save_p = 1;
6379 if (abi == ABI_V4)
6380 info_ptr->cr_size = reg_size;
6383 /* If the current function calls __builtin_eh_return, then we need
6384 to allocate stack space for registers that will hold data for
6385 the exception handler. */
6386 if (current_function_calls_eh_return)
6388 unsigned int i;
6389 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
6390 continue;
6391 ehrd_size = i * UNITS_PER_WORD;
6393 else
6394 ehrd_size = 0;
6396 /* Determine various sizes. */
6397 info_ptr->reg_size = reg_size;
6398 info_ptr->fixed_size = RS6000_SAVE_AREA;
6399 info_ptr->varargs_size = RS6000_VARARGS_AREA;
6400 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
6401 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
6404 if (TARGET_ALTIVEC_ABI)
6406 info_ptr->vrsave_mask = compute_vrsave_mask ();
6407 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
6409 else
6411 info_ptr->vrsave_mask = 0;
6412 info_ptr->vrsave_size = 0;
6415 /* Calculate the offsets. */
6416 switch (abi)
6418 case ABI_NONE:
6419 default:
6420 abort ();
6422 case ABI_AIX:
6423 case ABI_AIX_NODESC:
6424 case ABI_DARWIN:
6425 info_ptr->fp_save_offset = - info_ptr->fp_size;
6426 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
6428 if (TARGET_ALTIVEC_ABI)
6430 info_ptr->vrsave_save_offset
6431 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
6433 /* Align stack so vector save area is on a quadword boundary. */
6434 if (info_ptr->altivec_size != 0)
6435 info_ptr->altivec_padding_size
6436 = 16 - (-info_ptr->vrsave_save_offset % 16);
6437 else
6438 info_ptr->altivec_padding_size = 0;
6440 info_ptr->altivec_save_offset
6441 = info_ptr->vrsave_save_offset
6442 - info_ptr->altivec_padding_size
6443 - info_ptr->altivec_size;
6445 /* Adjust for AltiVec case. */
6446 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
6448 else
6449 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
6450 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
6451 info_ptr->lr_save_offset = 2*reg_size;
6452 break;
6454 case ABI_V4:
6455 info_ptr->fp_save_offset = - info_ptr->fp_size;
6456 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
6457 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
6459 if (TARGET_ALTIVEC_ABI)
6461 info_ptr->vrsave_save_offset
6462 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
6464 /* Align stack so vector save area is on a quadword boundary. */
6465 if (info_ptr->altivec_size != 0)
6466 info_ptr->altivec_padding_size
6467 = 16 - (-info_ptr->vrsave_save_offset % 16);
6468 else
6469 info_ptr->altivec_padding_size = 0;
6471 info_ptr->altivec_save_offset
6472 = info_ptr->vrsave_save_offset
6473 - info_ptr->altivec_padding_size
6474 - info_ptr->altivec_size;
6476 /* Adjust for AltiVec case. */
6477 info_ptr->toc_save_offset
6478 = info_ptr->altivec_save_offset - info_ptr->toc_size;
6480 else
6481 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
6482 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
6483 info_ptr->lr_save_offset = reg_size;
6484 break;
6487 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
6488 + info_ptr->gp_size
6489 + info_ptr->altivec_size
6490 + info_ptr->altivec_padding_size
6491 + info_ptr->vrsave_size
6492 + ehrd_size
6493 + info_ptr->cr_size
6494 + info_ptr->lr_size
6495 + info_ptr->vrsave_size
6496 + info_ptr->toc_size,
6497 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
6498 ? 16 : 8);
6500 total_raw_size = (info_ptr->vars_size
6501 + info_ptr->parm_size
6502 + info_ptr->save_size
6503 + info_ptr->varargs_size
6504 + info_ptr->fixed_size);
6506 info_ptr->total_size =
6507 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
6509 /* Determine if we need to allocate any stack frame:
6511 For AIX we need to push the stack if a frame pointer is needed
6512 (because the stack might be dynamically adjusted), if we are
6513 debugging, if we make calls, or if the sum of fp_save, gp_save,
6514 and local variables are more than the space needed to save all
6515 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
6516 + 18*8 = 288 (GPR13 reserved).
6518 For V.4 we don't have the stack cushion that AIX uses, but assume
6519 that the debugger can handle stackless frames. */
6521 if (info_ptr->calls_p)
6522 info_ptr->push_p = 1;
6524 else if (abi == ABI_V4)
6525 info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
6526 || info_ptr->calls_p);
6528 else
6529 info_ptr->push_p = (frame_pointer_needed
6530 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
6531 || ((total_raw_size - info_ptr->fixed_size)
6532 > (TARGET_32BIT ? 220 : 288)));
6534 /* Zero offsets if we're not saving those registers. */
6535 if (info_ptr->fp_size == 0)
6536 info_ptr->fp_save_offset = 0;
6538 if (info_ptr->gp_size == 0)
6539 info_ptr->gp_save_offset = 0;
6541 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
6542 info_ptr->altivec_save_offset = 0;
6544 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
6545 info_ptr->vrsave_save_offset = 0;
6547 if (! info_ptr->lr_save_p)
6548 info_ptr->lr_save_offset = 0;
6550 if (! info_ptr->cr_save_p)
6551 info_ptr->cr_save_offset = 0;
6553 if (! info_ptr->toc_save_p)
6554 info_ptr->toc_save_offset = 0;
6556 return info_ptr;
6559 void
6560 debug_stack_info (info)
6561 rs6000_stack_t *info;
6563 const char *abi_string;
6565 if (! info)
6566 info = rs6000_stack_info ();
6568 fprintf (stderr, "\nStack information for function %s:\n",
6569 ((current_function_decl && DECL_NAME (current_function_decl))
6570 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
6571 : "<unknown>"));
6573 switch (info->abi)
6575 default: abi_string = "Unknown"; break;
6576 case ABI_NONE: abi_string = "NONE"; break;
6577 case ABI_AIX:
6578 case ABI_AIX_NODESC: abi_string = "AIX"; break;
6579 case ABI_DARWIN: abi_string = "Darwin"; break;
6580 case ABI_V4: abi_string = "V.4"; break;
6583 fprintf (stderr, "\tABI = %5s\n", abi_string);
6585 if (TARGET_ALTIVEC_ABI)
6586 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
6588 if (info->first_gp_reg_save != 32)
6589 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
6591 if (info->first_fp_reg_save != 64)
6592 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
6594 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
6595 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
6596 info->first_altivec_reg_save);
6598 if (info->lr_save_p)
6599 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
6601 if (info->cr_save_p)
6602 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
6604 if (info->toc_save_p)
6605 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
6607 if (info->vrsave_mask)
6608 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
6610 if (info->push_p)
6611 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
6613 if (info->calls_p)
6614 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
6616 if (info->gp_save_offset)
6617 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
6619 if (info->fp_save_offset)
6620 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
6622 if (info->altivec_save_offset)
6623 fprintf (stderr, "\taltivec_save_offset = %5d\n",
6624 info->altivec_save_offset);
6626 if (info->vrsave_save_offset)
6627 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
6628 info->vrsave_save_offset);
6630 if (info->lr_save_offset)
6631 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
6633 if (info->cr_save_offset)
6634 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
6636 if (info->toc_save_offset)
6637 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
6639 if (info->varargs_save_offset)
6640 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
6642 if (info->total_size)
6643 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
6645 if (info->varargs_size)
6646 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
6648 if (info->vars_size)
6649 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
6651 if (info->parm_size)
6652 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
6654 if (info->fixed_size)
6655 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
6657 if (info->gp_size)
6658 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
6660 if (info->fp_size)
6661 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
6663 if (info->altivec_size)
6664 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
6666 if (info->vrsave_size)
6667 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
6669 if (info->altivec_padding_size)
6670 fprintf (stderr, "\taltivec_padding_size= %5d\n",
6671 info->altivec_padding_size);
6673 if (info->lr_size)
6674 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
6676 if (info->cr_size)
6677 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
6679 if (info->toc_size)
6680 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
6682 if (info->save_size)
6683 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
6685 if (info->reg_size != 4)
6686 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
6688 fprintf (stderr, "\n");
6692 rs6000_return_addr (count, frame)
6693 int count;
6694 rtx frame;
6696 /* Currently we don't optimize very well between prolog and body
6697 code and for PIC code the code can be actually quite bad, so
6698 don't try to be too clever here. */
6699 if (count != 0
6700 || flag_pic != 0
6701 || DEFAULT_ABI == ABI_AIX
6702 || DEFAULT_ABI == ABI_AIX_NODESC)
6704 cfun->machine->ra_needs_full_frame = 1;
6706 return
6707 gen_rtx_MEM
6708 (Pmode,
6709 memory_address
6710 (Pmode,
6711 plus_constant (copy_to_reg
6712 (gen_rtx_MEM (Pmode,
6713 memory_address (Pmode, frame))),
6714 RETURN_ADDRESS_OFFSET)));
6717 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
6720 static int
6721 rs6000_ra_ever_killed ()
6723 rtx top;
6725 #ifdef ASM_OUTPUT_MI_THUNK
6726 if (current_function_is_thunk)
6727 return 0;
6728 #endif
6729 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
6730 || cfun->machine->ra_needs_full_frame)
6731 return regs_ever_live[LINK_REGISTER_REGNUM];
6733 push_topmost_sequence ();
6734 top = get_insns ();
6735 pop_topmost_sequence ();
6737 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
6738 top, NULL_RTX);
6741 /* Add a REG_MAYBE_DEAD note to the insn. */
6742 static void
6743 rs6000_maybe_dead (insn)
6744 rtx insn;
6746 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
6747 const0_rtx,
6748 REG_NOTES (insn));
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when we are called while emitting the
   prologue; in that case hard registers (LR, r0) are used as
   temporaries instead of fresh pseudos, since no pseudos may be
   created at that point.  All emitted insns are marked REG_MAYBE_DEAD
   so flow does not warn if the TOC value ends up unused.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
    {
      if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
	{
	  /* SVR4 -fpic: one load_toc_v4_pic_si insn leaves the TOC
	     base in TEMP; copy it into the PIC register.  */
	  rtx temp = (fromprolog
		      ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		      : gen_reg_rtx (Pmode));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
	  rs6000_maybe_dead (emit_move_insn (dest, temp));
	}
      else if (flag_pic == 2)
	{
	  /* SVR4 -fPIC: compute the TOC base as LR-relative label
	     difference (prologue case) or by loading it from the
	     constant area (non-prologue case), then add the two
	     halves into DEST.  */
	  char buf[30];
	  rtx tempLR = (fromprolog
			? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
			: gen_reg_rtx (Pmode));
	  rtx temp0 = (fromprolog
		       ? gen_rtx_REG (Pmode, 0)
		       : gen_reg_rtx (Pmode));
	  rtx symF;

	  /* possibly create the toc section */
	  if (! toc_initialized)
	    {
	      toc_section ();
	      function_section (current_function_decl);
	    }

	  if (fromprolog)
	    {
	      rtx symL;

	      /* LCF<n> marks the point whose address LR will hold;
		 LCL<n> is the matching label in the TOC.  */
	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	      symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
								   symF)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
								   symL,
								   symF)));
	    }
	  else
	    {
	      rtx tocsym;
	      /* Label counter for out-of-prologue TOC reloads; static
		 so each expansion gets a unique LCG<n> label.  */
	      static int reload_toc_labelno = 0;

	      tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								    symF,
								    tocsym)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_move_insn (temp0,
						 gen_rtx_MEM (Pmode, dest)));
	    }
	  /* DEST = base + offset half computed above.  */
	  rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
	}
      else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
	{
	  /* This is for AIX code running in non-PIC ELF.  Materialize
	     the absolute address of the TOC anchor label LCTOC1 with a
	     high/low pair.  */
	  char buf[30];
	  rtx realsym;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
	  realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
	  rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
	}
      else
	abort ();
    }
  else
    {
      /* AIX (or AIX-style ELF): a single pattern reloads the TOC
	 pointer from its dedicated stack slot.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
}
/* Return the alias set used for TOC references, creating it lazily
   on the first call.  */

int
get_TOC_alias_set ()
{
  static int set = -1;

  if (set < 0)
    set = new_alias_set ();

  return set;
}
6858 /* This retuns nonzero if the current function uses the TOC. This is
6859 determined by the presence of (unspec ... 7), which is generated by
6860 the various load_toc_* patterns. */
6863 uses_TOC ()
6865 rtx insn;
6867 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6868 if (INSN_P (insn))
6870 rtx pat = PATTERN (insn);
6871 int i;
6873 if (GET_CODE (pat) == PARALLEL)
6874 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6875 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
6876 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
6877 return 1;
6879 return 0;
6883 create_TOC_reference (symbol)
6884 rtx symbol;
6886 return gen_rtx_PLUS (Pmode,
6887 gen_rtx_REG (Pmode, TOC_REGISTER),
6888 gen_rtx_CONST (Pmode,
6889 gen_rtx_MINUS (Pmode, symbol,
6890 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
6893 #if TARGET_AIX
6894 /* __throw will restore its own return address to be the same as the
6895 return address of the function that the throw is being made to.
6896 This is unfortunate, because we want to check the original
6897 return address to see if we need to restore the TOC.
6898 So we have to squirrel it away here.
6899 This is used only in compiling __throw and __rethrow.
6901 Most of this code should be removed by CSE. */
6902 static rtx insn_after_throw;
6904 /* This does the saving... */
6905 void
6906 rs6000_aix_emit_builtin_unwind_init ()
6908 rtx mem;
6909 rtx stack_top = gen_reg_rtx (Pmode);
6910 rtx opcode_addr = gen_reg_rtx (Pmode);
6912 insn_after_throw = gen_reg_rtx (SImode);
6914 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
6915 emit_move_insn (stack_top, mem);
6917 mem = gen_rtx_MEM (Pmode,
6918 gen_rtx_PLUS (Pmode, stack_top,
6919 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
6920 emit_move_insn (opcode_addr, mem);
6921 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
6924 /* Emit insns to _restore_ the TOC register, at runtime (specifically
6925 in _eh.o). Only used on AIX.
6927 The idea is that on AIX, function calls look like this:
6928 bl somefunction-trampoline
6929 lwz r2,20(sp)
6931 and later,
6932 somefunction-trampoline:
6933 stw r2,20(sp)
6934 ... load function address in the count register ...
6935 bctr
6936 or like this, if the linker determines that this is not a cross-module call
6937 and so the TOC need not be restored:
6938 bl somefunction
6940 or like this, if the compiler could determine that this is not a
6941 cross-module call:
6942 bl somefunction
6943 now, the tricky bit here is that register 2 is saved and restored
6944 by the _linker_, so we can't readily generate debugging information
6945 for it. So we need to go back up the call chain looking at the
6946 insns at return addresses to see which calls saved the TOC register
6947 and so see where it gets restored from.
6949 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
6950 just before the actual epilogue.
6952 On the bright side, this incurs no space or time overhead unless an
6953 exception is thrown, except for the extra code in libgcc.a.
6955 The parameter STACKSIZE is a register containing (at runtime)
6956 the amount to be popped off the stack in addition to the stack frame
6957 of this routine (which will be __throw or __rethrow, and so is
6958 guaranteed to have a stack frame). */
/* See the large comment above: walk up the call chain at runtime,
   looking at the opcode after each return address, and restore r2
   (the TOC pointer) from the frame of the first caller that saved it.
   STACKSIZE is a register holding the extra amount to pop beyond this
   routine's own frame.  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* bottom_of_stack = caller's frame (back-chain word).  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* top_of_stack marks where the walk must stop.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The instruction that restores the TOC after a cross-module call:
     0x80410014 is "lwz r2,20(r1)", 0xE8410028 is "ld r2,40(r1)" —
     matching the "lwz r2,20(sp)" sequence described above.  */
  emit_move_insn (tocompare,
		  GEN_INT (trunc_int_for_mode (TARGET_32BIT
					       ? 0x80410014
					       : 0xE8410028, SImode)));

  /* rs6000_aix_emit_builtin_unwind_init must have run first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the opcode at this frame's return address is not the TOC
     restore, skip the restore for this level.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Restore r2 from this frame's TOC save slot (word 5).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once we have walked up to top_of_stack.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance to the next frame via the back chain.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* Fetch the opcode at that frame's saved return address.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
7023 #endif /* TARGET_AIX */
7025 /* This ties together stack memory (MEM with an alias set of
7026 rs6000_sr_alias_set) and the change to the stack pointer. */
7028 static void
7029 rs6000_emit_stack_tie ()
7031 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7033 set_mem_alias_set (mem, rs6000_sr_alias_set);
7034 emit_insn (gen_stack_tie (mem));
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   SIZE is the number of bytes to allocate.  When TARGET_UPDATE, a
   single store-with-update both decrements the stack pointer and
   stores the back chain; otherwise the decrement and the back-chain
   store are separate insns (using the r12 copy).  The final insn is
   marked frame-related with an explicit sp = sp - SIZE note for the
   unwinder.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* -fstack-limit support: trap before moving sp below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit is in a register: tmp = limit + size; trap if the
	     current sp is already below that.  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize limit + size with a
	     high/low pair, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* r12 also holds the old sp when !TARGET_UPDATE, for the explicit
     back-chain store below.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* The update forms take a 16-bit displacement; larger sizes
	 must go through a register.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* stwu/stdu: store old sp at new sp and decrement in one insn.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* Decrement sp, then store the saved old sp (r12) as the back
	 chain.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the sp adjustment to the DWARF unwinder, independent of
     which insn form performed it.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
   deduce these equivalences by itself so it wasn't necessary to hold
   its hand so much.

   The note is a REG_FRAME_RELATED_EXPR describing, in sp-relative
   terms, what the insn does — used when the insn itself addresses the
   frame through some other register (e.g. r12 as frame_ptr_rtx in the
   prologue).  */

static void
rs6000_frame_related (insn, reg, val, reg2, rreg)
     rtx insn;
     rtx reg;
     HOST_WIDE_INT val;
     rtx reg2;
     rtx rreg;
{
  rtx real, temp;

  /* Work on a copy so INSN's own pattern is untouched; the rewritten
     copy becomes the note.  */
  real = copy_rtx (PATTERN (insn));

  real = replace_rtx (real, reg,
		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
							STACK_POINTER_REGNUM),
				    GEN_INT (val)));

  /* We expect that 'real' is either a SET or a PARALLEL containing
     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
     are important so they all have to be marked RTX_FRAME_RELATED_P.  */

  if (GET_CODE (real) == SET)
    {
      rtx set = real;

      /* Fold the (plus sp val) substitution into the addresses so
	 the unwinder sees simple sp+const forms.  */
      temp = simplify_rtx (SET_SRC (set));
      if (temp)
	SET_SRC (set) = temp;
      temp = simplify_rtx (SET_DEST (set));
      if (temp)
	SET_DEST (set) = temp;
      if (GET_CODE (SET_DEST (set)) == MEM)
	{
	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
	  if (temp)
	    XEXP (SET_DEST (set), 0) = temp;
	}
    }
  else if (GET_CODE (real) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (real, 0); i++)
	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
	  {
	    rtx set = XVECEXP (real, 0, i);

	    /* Same simplification as the single-SET case above,
	       applied to each SET in the PARALLEL.  */
	    temp = simplify_rtx (SET_SRC (set));
	    if (temp)
	      SET_SRC (set) = temp;
	    temp = simplify_rtx (SET_DEST (set));
	    if (temp)
	      SET_DEST (set) = temp;
	    if (GET_CODE (SET_DEST (set)) == MEM)
	      {
		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
		if (temp)
		  XEXP (SET_DEST (set), 0) = temp;
	      }
	    RTX_FRAME_RELATED_P (set) = 1;
	  }
    }
  else
    abort ();

  /* Optional second substitution (e.g. r0 -> LR for the LR save).  */
  if (reg2 != NULL_RTX)
    real = replace_rtx (real, reg2, rreg);

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					real,
					REG_NOTES (insn));
}
7206 /* Returns an insn that has a vrsave set operation with the
7207 appropriate CLOBBERs. */
7209 static rtx
7210 generate_set_vrsave (reg, info)
7211 rtx reg;
7212 rs6000_stack_t *info;
7214 int nclobs, i;
7215 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
7217 clobs[0] = gen_set_vrsave (reg);
7219 nclobs = 1;
7221 /* CLOBBER the registers in the mask. */
7223 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7224 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
7225 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
7226 gen_rtx_REG (V4SImode, i));
7228 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
7230 for (i = 0; i < nclobs; ++i)
7231 XVECEXP (insn, 0, i) = clobs[i];
7233 return insn;
7236 /* Emit function prologue as insns. */
7238 void
7239 rs6000_emit_prologue ()
7241 rs6000_stack_t *info = rs6000_stack_info ();
7242 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
7243 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7244 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7245 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
7246 rtx frame_reg_rtx = sp_reg_rtx;
7247 rtx cr_save_rtx = NULL;
7248 rtx insn;
7249 int saving_FPRs_inline;
7250 int using_store_multiple;
7251 HOST_WIDE_INT sp_offset = 0;
7253 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
7254 && info->first_gp_reg_save < 31);
7255 saving_FPRs_inline = (info->first_fp_reg_save == 64
7256 || FP_SAVE_INLINE (info->first_fp_reg_save));
7258 /* For V.4, update stack before we do any saving and set back pointer. */
7259 if (info->push_p && DEFAULT_ABI == ABI_V4)
7261 if (info->total_size < 32767)
7262 sp_offset = info->total_size;
7263 else
7264 frame_reg_rtx = frame_ptr_rtx;
7265 rs6000_emit_allocate_stack (info->total_size,
7266 (frame_reg_rtx != sp_reg_rtx
7267 && (info->cr_save_p
7268 || info->lr_save_p
7269 || info->first_fp_reg_save < 64
7270 || info->first_gp_reg_save < 32
7271 )));
7272 if (frame_reg_rtx != sp_reg_rtx)
7273 rs6000_emit_stack_tie ();
7276 /* If we use the link register, get it into r0. */
7277 if (info->lr_save_p)
7278 emit_move_insn (gen_rtx_REG (Pmode, 0),
7279 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7281 /* If we need to save CR, put it into r12. */
7282 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
7284 cr_save_rtx = gen_rtx_REG (SImode, 12);
7285 emit_insn (gen_movesi_from_cr (cr_save_rtx));
7288 /* Do any required saving of fpr's. If only one or two to save, do
7289 it ourselves. Otherwise, call function. */
7290 if (saving_FPRs_inline)
7292 int i;
7293 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7294 if ((regs_ever_live[info->first_fp_reg_save+i]
7295 && ! call_used_regs[info->first_fp_reg_save+i]))
7297 rtx addr, reg, mem;
7298 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
7299 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7300 GEN_INT (info->fp_save_offset
7301 + sp_offset
7302 + 8 * i));
7303 mem = gen_rtx_MEM (DFmode, addr);
7304 set_mem_alias_set (mem, rs6000_sr_alias_set);
7306 insn = emit_move_insn (mem, reg);
7307 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7308 NULL_RTX, NULL_RTX);
7311 else if (info->first_fp_reg_save != 64)
7313 int i;
7314 char rname[30];
7315 const char *alloc_rname;
7316 rtvec p;
7317 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
7319 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
7320 gen_rtx_REG (Pmode,
7321 LINK_REGISTER_REGNUM));
7322 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
7323 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
7324 alloc_rname = ggc_strdup (rname);
7325 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
7326 gen_rtx_SYMBOL_REF (Pmode,
7327 alloc_rname));
7328 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7330 rtx addr, reg, mem;
7331 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
7332 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7333 GEN_INT (info->fp_save_offset
7334 + sp_offset + 8*i));
7335 mem = gen_rtx_MEM (DFmode, addr);
7336 set_mem_alias_set (mem, rs6000_sr_alias_set);
7338 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
7340 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7341 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7342 NULL_RTX, NULL_RTX);
7345 /* Save GPRs. This is done as a PARALLEL if we are using
7346 the store-multiple instructions. */
7347 if (using_store_multiple)
7349 rtvec p, dwarfp;
7350 int i;
7351 p = rtvec_alloc (32 - info->first_gp_reg_save);
7352 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
7353 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7355 rtx addr, reg, mem;
7356 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
7357 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7358 GEN_INT (info->gp_save_offset
7359 + sp_offset
7360 + reg_size * i));
7361 mem = gen_rtx_MEM (reg_mode, addr);
7362 set_mem_alias_set (mem, rs6000_sr_alias_set);
7364 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
7366 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7367 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7368 NULL_RTX, NULL_RTX);
7370 else
7372 int i;
7373 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7374 if ((regs_ever_live[info->first_gp_reg_save+i]
7375 && ! call_used_regs[info->first_gp_reg_save+i])
7376 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
7377 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7378 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
7380 rtx addr, reg, mem;
7381 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
7382 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7383 GEN_INT (info->gp_save_offset
7384 + sp_offset
7385 + reg_size * i));
7386 mem = gen_rtx_MEM (reg_mode, addr);
7387 set_mem_alias_set (mem, rs6000_sr_alias_set);
7389 insn = emit_move_insn (mem, reg);
7390 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7391 NULL_RTX, NULL_RTX);
7395 /* ??? There's no need to emit actual instructions here, but it's the
7396 easiest way to get the frame unwind information emitted. */
7397 if (current_function_calls_eh_return)
7399 unsigned int i, regno;
7401 for (i = 0; ; ++i)
7403 rtx addr, reg, mem;
7405 regno = EH_RETURN_DATA_REGNO (i);
7406 if (regno == INVALID_REGNUM)
7407 break;
7409 reg = gen_rtx_REG (reg_mode, regno);
7410 addr = plus_constant (frame_reg_rtx,
7411 info->ehrd_offset + sp_offset
7412 + reg_size * (int) i);
7413 mem = gen_rtx_MEM (reg_mode, addr);
7414 set_mem_alias_set (mem, rs6000_sr_alias_set);
7416 insn = emit_move_insn (mem, reg);
7417 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7418 NULL_RTX, NULL_RTX);
7422 /* Save lr if we used it. */
7423 if (info->lr_save_p)
7425 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7426 GEN_INT (info->lr_save_offset + sp_offset));
7427 rtx reg = gen_rtx_REG (Pmode, 0);
7428 rtx mem = gen_rtx_MEM (Pmode, addr);
7429 /* This should not be of rs6000_sr_alias_set, because of
7430 __builtin_return_address. */
7432 insn = emit_move_insn (mem, reg);
7433 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7434 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7437 /* Save CR if we use any that must be preserved. */
7438 if (info->cr_save_p)
7440 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7441 GEN_INT (info->cr_save_offset + sp_offset));
7442 rtx mem = gen_rtx_MEM (SImode, addr);
7444 set_mem_alias_set (mem, rs6000_sr_alias_set);
7446 /* If r12 was used to hold the original sp, copy cr into r0 now
7447 that it's free. */
7448 if (REGNO (frame_reg_rtx) == 12)
7450 cr_save_rtx = gen_rtx_REG (SImode, 0);
7451 emit_insn (gen_movesi_from_cr (cr_save_rtx));
7453 insn = emit_move_insn (mem, cr_save_rtx);
7455 /* Now, there's no way that dwarf2out_frame_debug_expr is going
7456 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
7457 OK. All we have to do is specify that _one_ condition code
7458 register is saved in this stack slot. The thrower's epilogue
7459 will then restore all the call-saved registers.
7460 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
7461 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7462 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
7465 /* Update stack and set back pointer unless this is V.4,
7466 for which it was done previously. */
7467 if (info->push_p && DEFAULT_ABI != ABI_V4)
7468 rs6000_emit_allocate_stack (info->total_size, FALSE);
7470 /* Save AltiVec registers if needed. */
7471 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
7473 int i;
7475 /* There should be a non inline version of this, for when we
7476 are saving lots of vector registers. */
7477 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
7478 if (regs_ever_live[i] && ! call_used_regs[i])
7480 rtx addr, areg, savereg, mem;
7482 savereg = gen_rtx_REG (V4SImode, i);
7484 areg = gen_rtx_REG (Pmode, 0);
7485 emit_move_insn
7486 (areg, GEN_INT (info->altivec_save_offset
7487 + sp_offset
7488 + 16 * (i - info->first_altivec_reg_save)));
7490 /* AltiVec addressing mode is [reg+reg]. */
7491 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
7492 mem = gen_rtx_MEM (V4SImode, addr);
7493 set_mem_alias_set (mem, rs6000_sr_alias_set);
7495 insn = emit_move_insn (mem, savereg);
7496 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7497 NULL_RTX, NULL_RTX);
7501 /* VRSAVE is a bit vector representing which AltiVec registers
7502 are used. The OS uses this to determine which vector
7503 registers to save on a context switch. We need to save
7504 VRSAVE on the stack frame, add whatever AltiVec registers we
7505 used in this function, and do the corresponding magic in the
7506 epilogue. */
7508 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
7510 rtx reg, addr, mem;
7512 /* Get VRSAVE onto a GPR. */
7513 reg = gen_rtx_REG (SImode, 12);
7514 emit_insn (gen_get_vrsave (reg));
7516 /* Save VRSAVE. */
7517 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7518 GEN_INT (info->vrsave_save_offset + sp_offset));
7519 mem = gen_rtx_MEM (SImode, addr);
7520 set_mem_alias_set (mem, rs6000_sr_alias_set);
7521 insn = emit_move_insn (mem, reg);
7522 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7523 NULL_RTX, NULL_RTX);
7525 /* Include the registers in the mask. */
7526 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
7528 insn = emit_insn (generate_set_vrsave (reg, info));
7530 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7531 NULL_RTX, NULL_RTX);
7534 /* Set frame pointer, if needed. */
7535 if (frame_pointer_needed)
7537 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
7538 sp_reg_rtx);
7539 RTX_FRAME_RELATED_P (insn) = 1;
7542 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
7543 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7544 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
7545 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
7547 /* If emit_load_toc_table will use the link register, we need to save
7548 it. We use R11 for this purpose because emit_load_toc_table
7549 can use register 0. This allows us to use a plain 'blr' to return
7550 from the procedure more often. */
7551 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
7552 && ! info->lr_save_p
7553 && EXIT_BLOCK_PTR->pred != NULL);
7554 if (save_LR_around_toc_setup)
7555 emit_move_insn (gen_rtx_REG (Pmode, 11),
7556 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7558 rs6000_emit_load_toc_table (TRUE);
7560 if (save_LR_around_toc_setup)
7561 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7562 gen_rtx_REG (Pmode, 11));
7565 if (DEFAULT_ABI == ABI_DARWIN
7566 && flag_pic && current_function_uses_pic_offset_table)
7568 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
7570 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
7572 rs6000_maybe_dead (
7573 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
7574 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
7578 /* Write function prologue. */
7580 static void
7581 rs6000_output_function_prologue (file, size)
7582 FILE *file;
7583 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
7585 rs6000_stack_t *info = rs6000_stack_info ();
7587 if (TARGET_DEBUG_STACK)
7588 debug_stack_info (info);
7590 /* Write .extern for any function we will call to save and restore
7591 fp values. */
7592 if (info->first_fp_reg_save < 64
7593 && !FP_SAVE_INLINE (info->first_fp_reg_save))
7594 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
7595 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
7596 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
7597 RESTORE_FP_SUFFIX);
7599 /* Write .extern for AIX common mode routines, if needed. */
7600 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
7602 fputs ("\t.extern __mulh\n", file);
7603 fputs ("\t.extern __mull\n", file);
7604 fputs ("\t.extern __divss\n", file);
7605 fputs ("\t.extern __divus\n", file);
7606 fputs ("\t.extern __quoss\n", file);
7607 fputs ("\t.extern __quous\n", file);
7608 common_mode_defined = 1;
7611 if (! HAVE_prologue)
7613 start_sequence ();
7615 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
7616 the "toplevel" insn chain. */
7617 emit_note (0, NOTE_INSN_DELETED);
7618 rs6000_emit_prologue ();
7619 emit_note (0, NOTE_INSN_DELETED);
7621 if (TARGET_DEBUG_STACK)
7622 debug_rtx_list (get_insns (), 100);
7623 final (get_insns (), file, FALSE, FALSE);
7624 end_sequence ();
7627 rs6000_pic_labelno++;
7630 /* Emit function epilogue as insns.
7632 At present, dwarf2out_frame_debug_expr doesn't understand
7633 register restores, so we don't bother setting RTX_FRAME_RELATED_P
7634 anywhere in the epilogue. Most of the insns below would in any case
7635 need special notes to explain where r11 is in relation to the stack. */
7637 void
7638 rs6000_emit_epilogue (sibcall)
7639 int sibcall;
7641 rs6000_stack_t *info;
7642 int restoring_FPRs_inline;
7643 int using_load_multiple;
7644 int using_mfcr_multiple;
7645 int use_backchain_to_restore_sp;
7646 int sp_offset = 0;
7647 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
7648 rtx frame_reg_rtx = sp_reg_rtx;
7649 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
7650 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7651 int i;
7653 info = rs6000_stack_info ();
7654 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
7655 && info->first_gp_reg_save < 31);
7656 restoring_FPRs_inline = (sibcall
7657 || current_function_calls_eh_return
7658 || info->first_fp_reg_save == 64
7659 || FP_SAVE_INLINE (info->first_fp_reg_save));
7660 use_backchain_to_restore_sp = (frame_pointer_needed
7661 || current_function_calls_alloca
7662 || info->total_size > 32767);
7663 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
7664 || rs6000_cpu == PROCESSOR_PPC603
7665 || rs6000_cpu == PROCESSOR_PPC750
7666 || optimize_size);
7668 /* If we have a frame pointer, a call to alloca, or a large stack
7669 frame, restore the old stack pointer using the backchain. Otherwise,
7670 we know what size to update it with. */
7671 if (use_backchain_to_restore_sp)
7673 /* Under V.4, don't reset the stack pointer until after we're done
7674 loading the saved registers. */
7675 if (DEFAULT_ABI == ABI_V4)
7676 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
7678 emit_move_insn (frame_reg_rtx,
7679 gen_rtx_MEM (Pmode, sp_reg_rtx));
7682 else if (info->push_p)
7684 if (DEFAULT_ABI == ABI_V4)
7685 sp_offset = info->total_size;
7686 else
7688 emit_insn (TARGET_32BIT
7689 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
7690 GEN_INT (info->total_size))
7691 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
7692 GEN_INT (info->total_size)));
7696 /* Get the old lr if we saved it. */
7697 if (info->lr_save_p)
7699 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7700 GEN_INT (info->lr_save_offset + sp_offset));
7701 rtx mem = gen_rtx_MEM (Pmode, addr);
7703 set_mem_alias_set (mem, rs6000_sr_alias_set);
7705 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
7708 /* Get the old cr if we saved it. */
7709 if (info->cr_save_p)
7711 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7712 GEN_INT (info->cr_save_offset + sp_offset));
7713 rtx mem = gen_rtx_MEM (SImode, addr);
7715 set_mem_alias_set (mem, rs6000_sr_alias_set);
7717 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
7720 /* Set LR here to try to overlap restores below. */
7721 if (info->lr_save_p)
7722 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7723 gen_rtx_REG (Pmode, 0));
7725 /* Load exception handler data registers, if needed. */
7726 if (current_function_calls_eh_return)
7728 unsigned int i, regno;
7730 for (i = 0; ; ++i)
7732 rtx addr, mem;
7734 regno = EH_RETURN_DATA_REGNO (i);
7735 if (regno == INVALID_REGNUM)
7736 break;
7738 addr = plus_constant (frame_reg_rtx,
7739 info->ehrd_offset + sp_offset
7740 + reg_size * (int) i);
7741 mem = gen_rtx_MEM (reg_mode, addr);
7742 set_mem_alias_set (mem, rs6000_sr_alias_set);
7744 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
7748 /* Restore GPRs. This is done as a PARALLEL if we are using
7749 the load-multiple instructions. */
7750 if (using_load_multiple)
7752 rtvec p;
7753 p = rtvec_alloc (32 - info->first_gp_reg_save);
7754 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7756 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7757 GEN_INT (info->gp_save_offset
7758 + sp_offset
7759 + reg_size * i));
7760 rtx mem = gen_rtx_MEM (reg_mode, addr);
7762 set_mem_alias_set (mem, rs6000_sr_alias_set);
7764 RTVEC_ELT (p, i) =
7765 gen_rtx_SET (VOIDmode,
7766 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
7767 mem);
7769 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7771 else
7772 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7773 if ((regs_ever_live[info->first_gp_reg_save+i]
7774 && ! call_used_regs[info->first_gp_reg_save+i])
7775 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
7776 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7777 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
7779 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7780 GEN_INT (info->gp_save_offset
7781 + sp_offset
7782 + reg_size * i));
7783 rtx mem = gen_rtx_MEM (reg_mode, addr);
7785 set_mem_alias_set (mem, rs6000_sr_alias_set);
7787 emit_move_insn (gen_rtx_REG (reg_mode,
7788 info->first_gp_reg_save + i),
7789 mem);
7792 /* Restore fpr's if we need to do it without calling a function. */
7793 if (restoring_FPRs_inline)
7794 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7795 if ((regs_ever_live[info->first_fp_reg_save+i]
7796 && ! call_used_regs[info->first_fp_reg_save+i]))
7798 rtx addr, mem;
7799 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7800 GEN_INT (info->fp_save_offset
7801 + sp_offset
7802 + 8 * i));
7803 mem = gen_rtx_MEM (DFmode, addr);
7804 set_mem_alias_set (mem, rs6000_sr_alias_set);
7806 emit_move_insn (gen_rtx_REG (DFmode,
7807 info->first_fp_reg_save + i),
7808 mem);
7811 /* Restore AltiVec registers if needed. */
7812 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
7814 int i;
7816 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
7817 if (regs_ever_live[i] && ! call_used_regs[i])
7819 rtx addr, areg, mem;
7821 areg = gen_rtx_REG (Pmode, 0);
7822 emit_move_insn
7823 (areg, GEN_INT (info->altivec_save_offset
7824 + sp_offset
7825 + 16 * (i - info->first_altivec_reg_save)));
7827 /* AltiVec addressing mode is [reg+reg]. */
7828 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
7829 mem = gen_rtx_MEM (V4SImode, addr);
7830 set_mem_alias_set (mem, rs6000_sr_alias_set);
7832 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
7836 /* Restore VRSAVE if needed. */
7837 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
7839 rtx addr, mem, reg;
7841 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7842 GEN_INT (info->vrsave_save_offset + sp_offset));
7843 mem = gen_rtx_MEM (SImode, addr);
7844 set_mem_alias_set (mem, rs6000_sr_alias_set);
7845 reg = gen_rtx_REG (SImode, 12);
7846 emit_move_insn (reg, mem);
7848 emit_insn (generate_set_vrsave (reg, info));
7851 /* If we saved cr, restore it here. Just those that were used. */
7852 if (info->cr_save_p)
7854 rtx r12_rtx = gen_rtx_REG (SImode, 12);
7855 int count = 0;
7857 if (using_mfcr_multiple)
7859 for (i = 0; i < 8; i++)
7860 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
7861 count++;
7862 if (count == 0)
7863 abort ();
7866 if (using_mfcr_multiple && count > 1)
7868 rtvec p;
7869 int ndx;
7871 p = rtvec_alloc (count);
7873 ndx = 0;
7874 for (i = 0; i < 8; i++)
7875 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
7877 rtvec r = rtvec_alloc (2);
7878 RTVEC_ELT (r, 0) = r12_rtx;
7879 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
7880 RTVEC_ELT (p, ndx) =
7881 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
7882 gen_rtx_UNSPEC (CCmode, r, 20));
7883 ndx++;
7885 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7886 if (ndx != count)
7887 abort ();
7889 else
7890 for (i = 0; i < 8; i++)
7891 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
7893 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
7894 CR0_REGNO+i),
7895 r12_rtx));
7899 /* If this is V.4, unwind the stack pointer after all of the loads
7900 have been done. We need to emit a block here so that sched
7901 doesn't decide to move the sp change before the register restores
7902 (which may not have any obvious dependency on the stack). This
7903 doesn't hurt performance, because there is no scheduling that can
7904 be done after this point. */
7905 if (DEFAULT_ABI == ABI_V4)
7907 if (frame_reg_rtx != sp_reg_rtx)
7908 rs6000_emit_stack_tie ();
7910 if (use_backchain_to_restore_sp)
7912 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
7914 else if (sp_offset != 0)
7916 emit_insn (Pmode == SImode
7917 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
7918 GEN_INT (sp_offset))
7919 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
7920 GEN_INT (sp_offset)));
7924 if (current_function_calls_eh_return)
7926 rtx sa = EH_RETURN_STACKADJ_RTX;
7927 emit_insn (Pmode == SImode
7928 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
7929 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
7932 if (!sibcall)
7934 rtvec p;
7935 if (! restoring_FPRs_inline)
7936 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
7937 else
7938 p = rtvec_alloc (2);
7940 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7941 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
7942 gen_rtx_REG (Pmode,
7943 LINK_REGISTER_REGNUM));
7945 /* If we have to restore more than two FP registers, branch to the
7946 restore function. It will return to our caller. */
7947 if (! restoring_FPRs_inline)
7949 int i;
7950 char rname[30];
7951 const char *alloc_rname;
7953 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
7954 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
7955 alloc_rname = ggc_strdup (rname);
7956 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
7957 gen_rtx_SYMBOL_REF (Pmode,
7958 alloc_rname));
7960 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7962 rtx addr, mem;
7963 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
7964 GEN_INT (info->fp_save_offset + 8*i));
7965 mem = gen_rtx_MEM (DFmode, addr);
7966 set_mem_alias_set (mem, rs6000_sr_alias_set);
7968 RTVEC_ELT (p, i+3) =
7969 gen_rtx_SET (VOIDmode,
7970 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
7971 mem);
7975 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7979 /* Write function epilogue. */
7981 static void
7982 rs6000_output_function_epilogue (file, size)
7983 FILE *file;
7984 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
7986 rs6000_stack_t *info = rs6000_stack_info ();
7988 if (! HAVE_epilogue)
7990 rtx insn = get_last_insn ();
7991 /* If the last insn was a BARRIER, we don't have to write anything except
7992 the trace table. */
7993 if (GET_CODE (insn) == NOTE)
7994 insn = prev_nonnote_insn (insn);
7995 if (insn == 0 || GET_CODE (insn) != BARRIER)
7997 /* This is slightly ugly, but at least we don't have two
7998 copies of the epilogue-emitting code. */
7999 start_sequence ();
8001 /* A NOTE_INSN_DELETED is supposed to be at the start
8002 and end of the "toplevel" insn chain. */
8003 emit_note (0, NOTE_INSN_DELETED);
8004 rs6000_emit_epilogue (FALSE);
8005 emit_note (0, NOTE_INSN_DELETED);
8007 if (TARGET_DEBUG_STACK)
8008 debug_rtx_list (get_insns (), 100);
8009 final (get_insns (), file, FALSE, FALSE);
8010 end_sequence ();
8014 /* Output a traceback table here. See /usr/include/sys/debug.h for info
8015 on its format.
8017 We don't output a traceback table if -finhibit-size-directive was
8018 used. The documentation for -finhibit-size-directive reads
8019 ``don't output a @code{.size} assembler directive, or anything
8020 else that would cause trouble if the function is split in the
8021 middle, and the two halves are placed at locations far apart in
8022 memory.'' The traceback table has this property, since it
8023 includes the offset from the start of the function to the
8024 traceback table itself.
8026 System V.4 Powerpc's (and the embedded ABI derived from it) use a
8027 different traceback table. */
8028 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
8030 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
8031 const char *language_string = lang_hooks.name;
8032 int fixed_parms, float_parms, parm_info;
8033 int i;
8035 while (*fname == '.') /* V.4 encodes . in the name */
8036 fname++;
8038 /* Need label immediately before tbtab, so we can compute its offset
8039 from the function start. */
8040 if (*fname == '*')
8041 ++fname;
8042 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8043 ASM_OUTPUT_LABEL (file, fname);
8045 /* The .tbtab pseudo-op can only be used for the first eight
8046 expressions, since it can't handle the possibly variable
8047 length fields that follow. However, if you omit the optional
8048 fields, the assembler outputs zeros for all optional fields
8049 anyways, giving each variable length field is minimum length
8050 (as defined in sys/debug.h). Thus we can not use the .tbtab
8051 pseudo-op at all. */
8053 /* An all-zero word flags the start of the tbtab, for debuggers
8054 that have to find it by searching forward from the entry
8055 point or from the current pc. */
8056 fputs ("\t.long 0\n", file);
8058 /* Tbtab format type. Use format type 0. */
8059 fputs ("\t.byte 0,", file);
8061 /* Language type. Unfortunately, there doesn't seem to be any
8062 official way to get this info, so we use language_string. C
8063 is 0. C++ is 9. No number defined for Obj-C, so use the
8064 value for C for now. There is no official value for Java,
8065 although IBM appears to be using 13. There is no official value
8066 for Chill, so we've chosen 44 pseudo-randomly. */
8067 if (! strcmp (language_string, "GNU C")
8068 || ! strcmp (language_string, "GNU Objective-C"))
8069 i = 0;
8070 else if (! strcmp (language_string, "GNU F77"))
8071 i = 1;
8072 else if (! strcmp (language_string, "GNU Ada"))
8073 i = 3;
8074 else if (! strcmp (language_string, "GNU Pascal"))
8075 i = 2;
8076 else if (! strcmp (language_string, "GNU C++"))
8077 i = 9;
8078 else if (! strcmp (language_string, "GNU Java"))
8079 i = 13;
8080 else if (! strcmp (language_string, "GNU CHILL"))
8081 i = 44;
8082 else
8083 abort ();
8084 fprintf (file, "%d,", i);
8086 /* 8 single bit fields: global linkage (not set for C extern linkage,
8087 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
8088 from start of procedure stored in tbtab, internal function, function
8089 has controlled storage, function has no toc, function uses fp,
8090 function logs/aborts fp operations. */
8091 /* Assume that fp operations are used if any fp reg must be saved. */
8092 fprintf (file, "%d,", (1 << 5) | ((info->first_fp_reg_save != 64) << 1));
8094 /* 6 bitfields: function is interrupt handler, name present in
8095 proc table, function calls alloca, on condition directives
8096 (controls stack walks, 3 bits), saves condition reg, saves
8097 link reg. */
8098 /* The `function calls alloca' bit seems to be set whenever reg 31 is
8099 set up as a frame pointer, even when there is no alloca call. */
8100 fprintf (file, "%d,",
8101 ((1 << 6) | (frame_pointer_needed << 5)
8102 | (info->cr_save_p << 1) | (info->lr_save_p)));
8104 /* 3 bitfields: saves backchain, spare bit, number of fpr saved
8105 (6 bits). */
8106 fprintf (file, "%d,",
8107 (info->push_p << 7) | (64 - info->first_fp_reg_save));
8109 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
8110 fprintf (file, "%d,", (32 - first_reg_to_save ()));
8113 /* Compute the parameter info from the function decl argument
8114 list. */
8115 tree decl;
8116 int next_parm_info_bit;
8118 next_parm_info_bit = 31;
8119 parm_info = 0;
8120 fixed_parms = 0;
8121 float_parms = 0;
8123 for (decl = DECL_ARGUMENTS (current_function_decl);
8124 decl; decl = TREE_CHAIN (decl))
8126 rtx parameter = DECL_INCOMING_RTL (decl);
8127 enum machine_mode mode = GET_MODE (parameter);
8129 if (GET_CODE (parameter) == REG)
8131 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
8133 int bits;
8135 float_parms++;
8137 if (mode == SFmode)
8138 bits = 0x2;
8139 else if (mode == DFmode)
8140 bits = 0x3;
8141 else
8142 abort ();
8144 /* If only one bit will fit, don't or in this entry. */
8145 if (next_parm_info_bit > 0)
8146 parm_info |= (bits << (next_parm_info_bit - 1));
8147 next_parm_info_bit -= 2;
8149 else
8151 fixed_parms += ((GET_MODE_SIZE (mode)
8152 + (UNITS_PER_WORD - 1))
8153 / UNITS_PER_WORD);
8154 next_parm_info_bit -= 1;
8160 /* Number of fixed point parameters. */
8161 /* This is actually the number of words of fixed point parameters; thus
8162 an 8 byte struct counts as 2; and thus the maximum value is 8. */
8163 fprintf (file, "%d,", fixed_parms);
8165 /* 2 bitfields: number of floating point parameters (7 bits), parameters
8166 all on stack. */
8167 /* This is actually the number of fp registers that hold parameters;
8168 and thus the maximum value is 13. */
8169 /* Set parameters on stack bit if parameters are not in their original
8170 registers, regardless of whether they are on the stack? Xlc
8171 seems to set the bit when not optimizing. */
8172 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
8174 /* Optional fields follow. Some are variable length. */
8176 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
8177 11 double float. */
8178 /* There is an entry for each parameter in a register, in the order that
8179 they occur in the parameter list. Any intervening arguments on the
8180 stack are ignored. If the list overflows a long (max possible length
8181 34 bits) then completely leave off all elements that don't fit. */
8182 /* Only emit this long if there was at least one parameter. */
8183 if (fixed_parms || float_parms)
8184 fprintf (file, "\t.long %d\n", parm_info);
8186 /* Offset from start of code to tb table. */
8187 fputs ("\t.long ", file);
8188 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8189 #if TARGET_AIX
8190 RS6000_OUTPUT_BASENAME (file, fname);
8191 #else
8192 assemble_name (file, fname);
8193 #endif
8194 fputs ("-.", file);
8195 #if TARGET_AIX
8196 RS6000_OUTPUT_BASENAME (file, fname);
8197 #else
8198 assemble_name (file, fname);
8199 #endif
8200 putc ('\n', file);
8202 /* Interrupt handler mask. */
8203 /* Omit this long, since we never set the interrupt handler bit
8204 above. */
8206 /* Number of CTL (controlled storage) anchors. */
8207 /* Omit this long, since the has_ctl bit is never set above. */
8209 /* Displacement into stack of each CTL anchor. */
8210 /* Omit this list of longs, because there are no CTL anchors. */
8212 /* Length of function name. */
8213 fprintf (file, "\t.short %d\n", (int) strlen (fname));
8215 /* Function name. */
8216 assemble_string (fname, strlen (fname));
8218 /* Register for alloca automatic storage; this is always reg 31.
8219 Only emit this if the alloca bit was set above. */
8220 if (frame_pointer_needed)
8221 fputs ("\t.byte 31\n", file);
8225 /* A C compound statement that outputs the assembler code for a thunk
8226 function, used to implement C++ virtual function calls with
8227 multiple inheritance. The thunk acts as a wrapper around a virtual
8228 function, adjusting the implicit object parameter before handing
8229 control off to the real function.
8231 First, emit code to add the integer DELTA to the location that
8232 contains the incoming first argument. Assume that this argument
8233 contains a pointer, and is the one used to pass the `this' pointer
8234 in C++. This is the incoming argument *before* the function
8235 prologue, e.g. `%o0' on a sparc. The addition must preserve the
8236 values of all other incoming arguments.
8238 After the addition, emit code to jump to FUNCTION, which is a
8239 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
8240 not touch the return address. Hence returning from FUNCTION will
8241 return to whoever called the current `thunk'.
8243 The effect must be as if FUNCTION had been called directly with the
8244 adjusted first argument. This macro is responsible for emitting
8245 all of the code for a thunk function; output_function_prologue()
8246 and output_function_epilogue() are not invoked.
8248 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
8249 been extracted from it.) It might possibly be useful on some
8250 targets, but probably not.
8252 If you do not define this macro, the target-independent code in the
8253 C++ frontend will generate a less efficient heavyweight thunk that
8254 calls FUNCTION instead of jumping to it. The generic approach does
8255 not support varargs. */
8257 void
8258 output_mi_thunk (file, thunk_fndecl, delta, function)
8259 FILE *file;
8260 tree thunk_fndecl ATTRIBUTE_UNUSED;
8261 int delta;
8262 tree function;
8264 const char *this_reg =
8265 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
8266 const char *prefix;
8267 const char *fname;
8268 const char *r0 = reg_names[0];
8269 const char *toc = reg_names[2];
8270 const char *schain = reg_names[11];
8271 const char *r12 = reg_names[12];
8272 char buf[512];
8273 static int labelno = 0;
8275 /* Small constants that can be done by one add instruction. */
8276 if (delta >= -32768 && delta <= 32767)
8278 if (! TARGET_NEW_MNEMONICS)
8279 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
8280 else
8281 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
8284 /* Large constants that can be done by one addis instruction. */
8285 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
8286 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
8287 delta >> 16);
8289 /* 32-bit constants that can be done by an add and addis instruction. */
8290 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
8292 /* Break into two pieces, propagating the sign bit from the low
8293 word to the upper word. */
8294 int delta_high = delta >> 16;
8295 int delta_low = delta & 0xffff;
8296 if ((delta_low & 0x8000) != 0)
8298 delta_high++;
8299 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
8302 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
8303 delta_high);
8305 if (! TARGET_NEW_MNEMONICS)
8306 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
8307 else
8308 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
8311 /* 64-bit constants, fixme */
8312 else
8313 abort ();
8315 /* Get the prefix in front of the names. */
8316 switch (DEFAULT_ABI)
8318 default:
8319 abort ();
8321 case ABI_AIX:
8322 prefix = ".";
8323 break;
8325 case ABI_V4:
8326 case ABI_AIX_NODESC:
8327 prefix = "";
8328 break;
8331 /* If the function is compiled in this module, jump to it directly.
8332 Otherwise, load up its address and jump to it. */
8334 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
8336 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
8337 && ! lookup_attribute ("longcall",
8338 TYPE_ATTRIBUTES (TREE_TYPE (function))))
8340 fprintf (file, "\tb %s", prefix);
8341 assemble_name (file, fname);
8342 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
8343 putc ('\n', file);
8346 else
8348 switch (DEFAULT_ABI)
8350 default:
8351 abort ();
8353 case ABI_AIX:
8354 /* Set up a TOC entry for the function. */
8355 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
8356 toc_section ();
8357 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
8358 labelno++;
8360 if (TARGET_MINIMAL_TOC)
8361 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
8362 else
8364 fputs ("\t.tc ", file);
8365 assemble_name (file, fname);
8366 fputs ("[TC],", file);
8368 assemble_name (file, fname);
8369 putc ('\n', file);
8370 text_section ();
8371 if (TARGET_MINIMAL_TOC)
8372 asm_fprintf (file, (TARGET_32BIT)
8373 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
8374 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
8375 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
8376 assemble_name (file, buf);
8377 if (TARGET_ELF && TARGET_MINIMAL_TOC)
8378 fputs ("-(.LCTOC1)", file);
8379 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
8380 asm_fprintf (file,
8381 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
8382 r0, r12);
8384 asm_fprintf (file,
8385 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
8386 toc, r12);
8388 asm_fprintf (file, "\tmtctr %s\n", r0);
8389 asm_fprintf (file,
8390 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
8391 schain, r12);
8393 asm_fprintf (file, "\tbctr\n");
8394 break;
8396 case ABI_AIX_NODESC:
8397 case ABI_V4:
8398 fprintf (file, "\tb %s", prefix);
8399 assemble_name (file, fname);
8400 if (flag_pic) fputs ("@plt", file);
8401 putc ('\n', file);
8402 break;
8404 #if TARGET_MACHO
8405 case ABI_DARWIN:
8406 fprintf (file, "\tb %s", prefix);
8407 if (flag_pic && !machopic_name_defined_p (fname))
8408 assemble_name (file, machopic_stub_name (fname));
8409 else
8410 assemble_name (file, fname);
8411 putc ('\n', file);
8412 break;
8413 #endif
8419 /* A quick summary of the various types of 'constant-pool tables'
8420 under PowerPC:
8422 Target Flags Name One table per
8423 AIX (none) AIX TOC object file
8424 AIX -mfull-toc AIX TOC object file
8425 AIX -mminimal-toc AIX minimal TOC translation unit
8426 SVR4/EABI (none) SVR4 SDATA object file
8427 SVR4/EABI -fpic SVR4 pic object file
8428 SVR4/EABI -fPIC SVR4 PIC translation unit
8429 SVR4/EABI -mrelocatable EABI TOC function
8430 SVR4/EABI -maix AIX TOC object file
8431 SVR4/EABI -maix -mminimal-toc
8432 AIX minimal TOC translation unit
8434 Name Reg. Set by entries contains:
8435 made by addrs? fp? sum?
8437 AIX TOC 2 crt0 as Y option option
8438 AIX minimal TOC 30 prolog gcc Y Y option
8439 SVR4 SDATA 13 crt0 gcc N Y N
8440 SVR4 pic 30 prolog ld Y not yet N
8441 SVR4 PIC 30 prolog gcc Y option option
8442 EABI TOC 30 prolog gcc Y option option
8446 /* Hash table stuff for keeping track of TOC entries. */
8448 struct toc_hash_struct
8450 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
8451 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
8452 rtx key;
8453 enum machine_mode key_mode;
8454 int labelno;
8457 static htab_t toc_hash_table;
8459 /* Hash functions for the hash table. */
8461 static unsigned
8462 rs6000_hash_constant (k)
8463 rtx k;
8465 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
8466 const char *format = GET_RTX_FORMAT (GET_CODE (k));
8467 int flen = strlen (format);
8468 int fidx;
8470 if (GET_CODE (k) == LABEL_REF)
8471 return result * 1231 + X0INT (XEXP (k, 0), 3);
8473 if (GET_CODE (k) == CONST_DOUBLE)
8474 fidx = 1;
8475 else if (GET_CODE (k) == CODE_LABEL)
8476 fidx = 3;
8477 else
8478 fidx = 0;
8480 for (; fidx < flen; fidx++)
8481 switch (format[fidx])
8483 case 's':
8485 unsigned i, len;
8486 const char *str = XSTR (k, fidx);
8487 len = strlen (str);
8488 result = result * 613 + len;
8489 for (i = 0; i < len; i++)
8490 result = result * 613 + (unsigned) str[i];
8491 break;
8493 case 'u':
8494 case 'e':
8495 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
8496 break;
8497 case 'i':
8498 case 'n':
8499 result = result * 613 + (unsigned) XINT (k, fidx);
8500 break;
8501 case 'w':
8502 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
8503 result = result * 613 + (unsigned) XWINT (k, fidx);
8504 else
8506 size_t i;
8507 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
8508 result = result * 613 + (unsigned) (XWINT (k, fidx)
8509 >> CHAR_BIT * i);
8511 break;
8512 default:
8513 abort ();
8515 return result;
8518 static unsigned
8519 toc_hash_function (hash_entry)
8520 const void * hash_entry;
8522 const struct toc_hash_struct *thc =
8523 (const struct toc_hash_struct *) hash_entry;
8524 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
8527 /* Compare H1 and H2 for equivalence. */
8529 static int
8530 toc_hash_eq (h1, h2)
8531 const void * h1;
8532 const void * h2;
8534 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
8535 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
8537 if (((const struct toc_hash_struct *) h1)->key_mode
8538 != ((const struct toc_hash_struct *) h2)->key_mode)
8539 return 0;
8541 /* Gotcha: One of these const_doubles will be in memory.
8542 The other may be on the constant-pool chain.
8543 So rtx_equal_p will think they are different... */
8544 if (r1 == r2)
8545 return 1;
8546 if (GET_CODE (r1) != GET_CODE (r2)
8547 || GET_MODE (r1) != GET_MODE (r2))
8548 return 0;
8549 if (GET_CODE (r1) == CONST_DOUBLE)
8551 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
8552 int i;
8553 for (i = 1; i < format_len; i++)
8554 if (XWINT (r1, i) != XWINT (r2, i))
8555 return 0;
8557 return 1;
8559 else if (GET_CODE (r1) == LABEL_REF)
8560 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
8561 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
8562 else
8563 return rtx_equal_p (r1, r2);
8566 /* Mark the hash table-entry HASH_ENTRY. */
8568 static int
8569 toc_hash_mark_entry (hash_slot, unused)
8570 void ** hash_slot;
8571 void * unused ATTRIBUTE_UNUSED;
8573 const struct toc_hash_struct * hash_entry =
8574 *(const struct toc_hash_struct **) hash_slot;
8575 rtx r = hash_entry->key;
8576 ggc_set_mark (hash_entry);
8577 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
8578 if (GET_CODE (r) == LABEL_REF)
8580 ggc_set_mark (r);
8581 ggc_set_mark (XEXP (r, 0));
8583 else
8584 ggc_mark_rtx (r);
8585 return 1;
8588 /* Mark all the elements of the TOC hash-table *HT. */
8590 static void
8591 toc_hash_mark_table (vht)
8592 void *vht;
8594 htab_t *ht = vht;
8596 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   NAME is a NUL-terminated symbol name; true if it carries one of the
   vtable ("_vt.", "_ZTV"), VTT ("_ZTT") or construction-vtable
   ("_ZTC") prefixes.

   Fixed: the macro body previously hard-coded the identifier `name'
   instead of expanding its parameter NAME, so it only worked at call
   sites whose argument variable happened to be spelled `name'.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
8610 void
8611 rs6000_output_symbol_ref (file, x)
8612 FILE *file;
8613 rtx x;
8615 /* Currently C++ toc references to vtables can be emitted before it
8616 is decided whether the vtable is public or private. If this is
8617 the case, then the linker will eventually complain that there is
8618 a reference to an unknown section. Thus, for vtables only,
8619 we emit the TOC reference to reference the symbol and not the
8620 section. */
8621 const char *name = XSTR (x, 0);
8623 if (VTABLE_NAME_P (name))
8625 RS6000_OUTPUT_BASENAME (file, name);
8627 else
8628 assemble_name (file, name);
/* Output a TOC entry.  We derive the entry name from what is being
   written.  FILE is the assembly output stream; X is the constant (or
   symbolic address) being placed in the TOC; LABELNO numbers the
   ..LCn internal label of the entry; MODE is X's machine mode.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
      struct toc_hash_struct *h;
      void * * found;

      /* Tentatively insert this entry in the TOC hash table; if an
	 equivalent entry is already there, just alias its label.  */
      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* 64-bit: a DFmode value fits in one doubleword entry.  */
      if (TARGET_64BIT)
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
	  fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
	  return;
      else
	  /* 32-bit: emit the two target words separately.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
	  fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
	  return;
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	  /* Pad the SFmode image out to a full doubleword.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l);
	  fprintf (file, "0x%lx00000000\n", l);
	  return;
      else
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l);
	  fprintf (file, "0x%lx\n", l);
	  return;
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the integer constant into low/high halves, sign
	 extending a CONST_INT as needed for the host width.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
      else
#if HOST_BITS_PER_WIDE_INT == 32
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
#else
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
		       POINTER_SIZE, &low, &high, 0);

      if (TARGET_64BIT)
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
	  fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
	  return;
      else
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long)high, (long)low);
	      fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
	  else
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
	      fprintf (file, "0x%lx\n", (long) low);
	  return;

  /* Symbolic entry: peel a CONST (PLUS sym offset) into base+offset.  */
  if (GET_CODE (x) == CONST)
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  STRIP_NAME_ENCODING (real_name, name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a nonzero offset into the entry name: .Nx for -x,
	 .Px for +x.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
  else
    output_addr_const (file, x);
  putc ('\n', file);
/* Write an assembler representation of the N-character string at P
   to FILE.

   On the RS/6000 this must be done with the .byte directive: printable
   characters go inside a quoted string (with '"' doubled to escape it)
   and everything else is emitted as a decimal byte value.  The
   assembler also truncates very long strings, so quoted runs are
   artificially broken after 512 characters.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *pending_close = NULL;
  int in_string = 0;
  int i;

  for (i = 0; i < n; i++)
    {
      char ch = *p++;

      if (ch < ' ' || ch >= 0177)
	{
	  /* Unprintable character: emit its decimal value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  in_string = 0;
	  continue;
	}

      /* Printable character: emit it inside a quoted string,
	 opening one first if necessary.  */
      if (open_string)
	fputs (open_string, file);
      putc (ch, file);

      /* Write two quotes to get one.  */
      if (ch == '"')
	{
	  putc (ch, file);
	  ++in_string;
	}

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      pending_close = "\"\n";
      ++in_string;

      /* The assembler truncates overly long strings; start a new
	 one well before that point.  */
      if (in_string >= 512)
	{
	  fputs (pending_close, file);

	  open_string = "\t.byte \"";
	  open_decimal = "\t.byte ";
	  pending_close = NULL;
	  in_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *base;		/* FILENAME with directories stripped.  */
  const char *dot = 0;		/* Last '.' seen anywhere in FILENAME
				   (may precede BASE).  */
  const char *scan;
  char *out;
  int len;

  /* Locate the basename and the last period in one pass.  */
  base = filename;
  for (scan = filename; *scan; scan++)
    if (*scan == '/')
      base = scan + 1;
    else if (*scan == '.')
      dot = scan;

  len = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) permalloc (len);

  out = *buf;
  *out++ = '_';

  /* Copy alphanumeric characters of the basename, substituting
     SECTION_DESC for the final period.  */
  for (scan = base; *scan; scan++)
    if (scan == dot)
      {
	strcpy (out, section_desc);
	out += strlen (section_desc);
      }
    else if (ISALNUM (*scan))
      *out++ = *scan;

  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
/* Emit profile function.  LABELNO numbers the per-function LPn label
   used to pass the call site to mcount on AIX.  */

void
output_profile_hook (labelno)
     int labelno;
  if (DEFAULT_ABI == ABI_AIX)
      char buf[30];
      const char *label_name;
      rtx fun;

      /* AIX: call the mcount library function with the LPn label
	 symbol as its argument.  */
      labelno += 1;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
  else if (DEFAULT_ABI == ABI_DARWIN)
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
/* Write function profiler code.  FILE is the assembly output and
   LABELNO numbers the LPn label for this function.  For V.4/eabi
   ABIs the mcount call is emitted textually here; for AIX and
   Darwin it is emitted as RTL by output_profile_hook instead.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
  char buf[100];

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    default:
      abort ();

    case ABI_V4:
    case ABI_AIX_NODESC:
      /* Save the link register, then load the address of the LPn
	 label into r0 by a PIC-mode-dependent sequence.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	  /* Small-model PIC: fetch the label address via the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
      else if (flag_pic > 1)
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
      else
	  /* Non-PIC: build the label address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);

      /* Preserve the static chain around the mcount call.  */
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
9113 /* Adjust the cost of a scheduling dependency. Return the new cost of
9114 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
9116 static int
9117 rs6000_adjust_cost (insn, link, dep_insn, cost)
9118 rtx insn;
9119 rtx link;
9120 rtx dep_insn ATTRIBUTE_UNUSED;
9121 int cost;
9123 if (! recog_memoized (insn))
9124 return 0;
9126 if (REG_NOTE_KIND (link) != 0)
9127 return 0;
9129 if (REG_NOTE_KIND (link) == 0)
9131 /* Data dependency; DEP_INSN writes a register that INSN reads
9132 some cycles later. */
9133 switch (get_attr_type (insn))
9135 case TYPE_JMPREG:
9136 /* Tell the first scheduling pass about the latency between
9137 a mtctr and bctr (and mtlr and br/blr). The first
9138 scheduling pass will not know about this latency since
9139 the mtctr instruction, which has the latency associated
9140 to it, will be generated by reload. */
9141 return TARGET_POWER ? 5 : 4;
9142 case TYPE_BRANCH:
9143 /* Leave some extra cycles between a compare and its
9144 dependent branch, to inhibit expensive mispredicts. */
9145 if ((rs6000_cpu_attr == CPU_PPC750
9146 || rs6000_cpu_attr == CPU_PPC7400
9147 || rs6000_cpu_attr == CPU_PPC7450)
9148 && recog_memoized (dep_insn)
9149 && (INSN_CODE (dep_insn) >= 0)
9150 && (get_attr_type (dep_insn) == TYPE_COMPARE
9151 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
9152 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
9153 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
9154 return cost + 2;
9155 default:
9156 break;
9158 /* Fall out to return default cost. */
9161 return cost;
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.

   Currently a no-op: the adjustment below is compiled out, so the
   incoming PRIORITY is returned unchanged.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  /* Disabled experiment; note it still contains a debugging
     fprintf to stderr.  */
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
#endif

  return priority;
9208 /* Return how many instructions the machine can issue per cycle. */
9210 static int
9211 rs6000_issue_rate ()
9213 switch (rs6000_cpu_attr) {
9214 case CPU_RIOS1: /* ? */
9215 case CPU_RS64A:
9216 case CPU_PPC601: /* ? */
9217 case CPU_PPC7450:
9218 return 3;
9219 case CPU_PPC603:
9220 case CPU_PPC750:
9221 case CPU_PPC7400:
9222 return 2;
9223 case CPU_RIOS2:
9224 case CPU_PPC604:
9225 case CPU_PPC604E:
9226 case CPU_PPC620:
9227 case CPU_PPC630:
9228 return 4;
9229 default:
9230 return 1;
9235 /* Length in units of the trampoline for entering a nested function. */
9238 rs6000_trampoline_size ()
9240 int ret = 0;
9242 switch (DEFAULT_ABI)
9244 default:
9245 abort ();
9247 case ABI_AIX:
9248 ret = (TARGET_32BIT) ? 12 : 24;
9249 break;
9251 case ABI_V4:
9252 case ABI_AIX_NODESC:
9253 ret = (TARGET_32BIT) ? 40 : 48;
9254 break;
9257 return ret;
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   ADDR is the address of the trampoline storage itself.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the entry address and TOC pointer from FNADDR's
	   descriptor, then store them plus the static chain into
	   the trampoline's descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      break;

    /* Under V.4/eabi, call __trampoline_setup to do the real work.  */
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;

  return;
/* Table of valid machine attributes.  */

const struct attribute_spec rs6000_attribute_table[] =
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall": applies to function types; handled below.  */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
9322 /* Handle a "longcall" attribute; arguments as in struct
9323 attribute_spec.handler. */
9325 static tree
9326 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
9327 tree *node;
9328 tree name;
9329 tree args ATTRIBUTE_UNUSED;
9330 int flags ATTRIBUTE_UNUSED;
9331 bool *no_add_attrs;
9333 if (TREE_CODE (*node) != FUNCTION_TYPE
9334 && TREE_CODE (*node) != FIELD_DECL
9335 && TREE_CODE (*node) != TYPE_DECL)
9337 warning ("`%s' attribute only applies to functions",
9338 IDENTIFIER_POINTER (name));
9339 *no_add_attrs = true;
9342 return NULL_TREE;
9345 /* Return a reference suitable for calling a function with the
9346 longcall attribute. */
9348 struct rtx_def *
9349 rs6000_longcall_ref (call_ref)
9350 rtx call_ref;
9352 const char *call_name;
9353 tree node;
9355 if (GET_CODE (call_ref) != SYMBOL_REF)
9356 return call_ref;
9358 /* System V adds '.' to the internal name, so skip them. */
9359 call_name = XSTR (call_ref, 0);
9360 if (*call_name == '.')
9362 while (*call_name == '.')
9363 call_name++;
9365 node = get_identifier (call_name);
9366 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
9369 return force_reg (Pmode, call_ref);
/* A C statement or statements to switch to the appropriate section
   for output of RTX in mode MODE.  You can assume that RTX is some
   kind of constant in RTL.  The argument MODE is redundant except in
   the case of a `const_int' rtx.  Select the section by calling
   `text_section' or one of the alternatives for other sections.

   Do not define this macro if you put all constants in the read-only
   data section.  */

#ifdef USING_ELFOS_H

void
rs6000_select_rtx_section (mode, x)
     enum machine_mode mode;
     rtx x;
  /* Constants eligible for the TOC go there; everything else goes
     to the read-only data section.  */
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    toc_section ();
  else
    const_section ();
/* A C statement or statements to switch to the appropriate
   section for output of DECL.  DECL is either a `VAR_DECL' node
   or a constant of some sort.  RELOC indicates whether forming
   the initial value of DECL requires link-time relocations.  */

void
rs6000_select_section (decl, reloc)
     tree decl;
     int reloc;
  int size = int_size_in_bytes (TREE_TYPE (decl));
  int needs_sdata;
  int readonly;
  /* Section emitters indexed by (readonly ? 0 : 2) + (sdata ? 1 : 0):
     const, sdata2, data, sdata.  */
  static void (* const sec_funcs[4]) PARAMS ((void)) = {
    &const_section,
    &sdata2_section,
    &data_section,
    &sdata_section
  };

  /* Small data: nonzero size up to -G limit, and small-data use
     enabled for this object.  */
  needs_sdata = (size > 0
		 && size <= g_switch_value
		 && rs6000_sdata != SDATA_NONE
		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));

  if (TREE_CODE (decl) == STRING_CST)
    readonly = ! flag_writable_strings;
  else if (TREE_CODE (decl) == VAR_DECL)
    readonly = (! (flag_pic && reloc)
		&& TREE_READONLY (decl)
		&& ! TREE_SIDE_EFFECTS (decl)
		&& DECL_INITIAL (decl)
		&& DECL_INITIAL (decl) != error_mark_node
		&& TREE_CONSTANT (DECL_INITIAL (decl)));
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    readonly = (! (flag_pic && reloc)
		&& ! TREE_SIDE_EFFECTS (decl)
		&& TREE_CONSTANT (decl));
  else
    readonly = 1;
  /* Only EABI has a read-only small-data section (.sdata2).  */
  if (needs_sdata && rs6000_sdata != SDATA_EABI)
    readonly = 0;

  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
/* A C statement to build up a unique section name, expressed as a
   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
   RELOC indicates whether the initial value of EXP requires
   link-time relocations.  If you do not define this macro, GCC will use
   the symbol name prefixed by `.' as the section name.  Note - this
   macro can now be called for uninitialized data items as well as
   initialised data and functions.  */

void
rs6000_unique_section (decl, reloc)
     tree decl;
     int reloc;
  int size = int_size_in_bytes (TREE_TYPE (decl));
  int needs_sdata;
  int readonly;
  int len;
  int sec;
  const char *name;
  char *string;
  const char *prefix;

  /* Per-section prefixes; column 1 is the .gnu.linkonce variant
     used for DECL_ONE_ONLY decls.  */
  static const char *const prefixes[7][2] =
    { ".text.",   ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".sdata2.", ".gnu.linkonce.s2." },
    { ".data.",   ".gnu.linkonce.d." },
    { ".sdata.",  ".gnu.linkonce.s." },
    { ".bss.",    ".gnu.linkonce.b." },
    { ".sbss.",   ".gnu.linkonce.sb." }

  needs_sdata = (TREE_CODE (decl) != FUNCTION_DECL
		 && size > 0
		 && size <= g_switch_value
		 && rs6000_sdata != SDATA_NONE
		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));

  if (TREE_CODE (decl) == STRING_CST)
    readonly = ! flag_writable_strings;
  else if (TREE_CODE (decl) == VAR_DECL)
    readonly = (! (flag_pic && reloc)
		&& TREE_READONLY (decl)
		&& ! TREE_SIDE_EFFECTS (decl)
		&& DECL_INITIAL (decl)
		&& DECL_INITIAL (decl) != error_mark_node
		&& TREE_CONSTANT (DECL_INITIAL (decl)));
  else
    readonly = 1;
  if (needs_sdata && rs6000_sdata != SDATA_EABI)
    readonly = 0;

  /* NOTE(review): `? 4 : 0' has lower precedence than `+', so the
     conditional applies to the WHOLE sum, making sec always 4 or 0
     -- not the per-term +4 the indentation suggests.  A naive
     re-parenthesization is not obviously safe either: e.g. a
     writable, uninitialized, non-sdata decl would yield
     1 + 2 + 0 + 4 = 7, past the end of prefixes[7][2].  Confirm the
     intended bss/sbss mapping before fixing.  */
  sec = ((TREE_CODE (decl) == FUNCTION_DECL ? 0 : 1)
	 + (readonly ? 0 : 2)
	 + (needs_sdata ? 1 : 0)
	 + (DECL_INITIAL (decl) == 0
	    || DECL_INITIAL (decl) == error_mark_node) ? 4 : 0);

  STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
  prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
  len    = strlen (name) + strlen (prefix);
  string = alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
/* If we are referencing a function that is static or is known to be
   in this file, make the SYMBOL_REF special.  We can use this to indicate
   that we can branch to this function without emitting a no-op after the
   call.  For real AIX calling sequences, we also replace the
   function name with the real name (1 or 2 leading .'s), rather than
   the function descriptor name.  This saves a lot of overriding code
   to read the prefixes.  */

void
rs6000_encode_section_info (decl)
     tree decl;
  if (TREE_CODE (decl) == FUNCTION_DECL)
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Locally-bound, non-weak functions can be branched to directly.  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
	  && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	  /* NOTE(review): len1 is always 1 here (we are inside the
	     ABI_AIX arm), and when len1 == 1 the str[1] = '.' below
	     is immediately overwritten by the memcpy -- the second
	     dot only matters for a hypothetical len1 == 2.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	  if (TREE_CODE (section_name) == STRING_CST)
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	  else
	    abort ();

      /* Small-data variables (by size, or by explicit placement in a
	 small-data section) get an '@' prefix so the assembler output
	 machinery knows to address them via the small-data base.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);

#endif /* USING_ELFOS_H */
9591 /* Return a REG that occurs in ADDR with coefficient 1.
9592 ADDR can be effectively incremented by incrementing REG.
9594 r0 is special and we must not select it as an address
9595 register by this routine since our caller will try to
9596 increment the returned register via an "la" instruction. */
9598 struct rtx_def *
9599 find_addr_reg (addr)
9600 rtx addr;
9602 while (GET_CODE (addr) == PLUS)
9604 if (GET_CODE (XEXP (addr, 0)) == REG
9605 && REGNO (XEXP (addr, 0)) != 0)
9606 addr = XEXP (addr, 0);
9607 else if (GET_CODE (XEXP (addr, 1)) == REG
9608 && REGNO (XEXP (addr, 1)) != 0)
9609 addr = XEXP (addr, 1);
9610 else if (CONSTANT_P (XEXP (addr, 0)))
9611 addr = XEXP (addr, 1);
9612 else if (CONSTANT_P (XEXP (addr, 1)))
9613 addr = XEXP (addr, 0);
9614 else
9615 abort ();
9617 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
9618 return addr;
9619 abort ();
9622 void
9623 rs6000_fatal_bad_address (op)
9624 rtx op;
9626 fatal_insn ("bad address", op);
/* Called to register all of our global variables with the garbage
   collector.  */

static void
rs6000_add_gc_roots ()
  ggc_add_rtx_root (&rs6000_compare_op0, 1);
  ggc_add_rtx_root (&rs6000_compare_op1, 1);

  /* Create the TOC hash table and register a custom marking routine
     so its entries stay live across collections.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);

#if TARGET_MACHO
  machopic_add_gc_roots ();
#endif
#if TARGET_MACHO

#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.

   Dead code (inside #if 0).  NOTE(review): if ever revived, the
   return expression mixes || and && without parentheses -- `A ||
   (B || C) && D' parses as `A || ((B || C) && D)', which happens to
   match the comment's intent but would draw a -Wparentheses warning;
   add explicit parens.  */

symbolic_operand (op)
     rtx op;
  switch (GET_CODE (op))
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
#endif
#ifdef RS6000_LONG_BRANCH

/* Linked list of compiler-generated long-branch stubs, one TREE_LIST
   node per stub: TREE_PURPOSE is the function name, TREE_VALUE the
   stub label, and TREE_TYPE carries the source line number.  */
static tree stub_list = 0;

/* ADD_COMPILER_STUB adds the compiler generated stub for handling
   procedure calls to the linked list.  */

void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
  tree stub = build_tree_list (function_name, label_name);
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;

/* Accessors for the fields packed into a stub TREE_LIST node.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  Only emits stubs for non-PIC code; the list is
   cleared in either case.  */

void
output_compiler_stub ()
  char tmp_buf[256];
  char label_buf[256];
  char *label;
  tree tmp_stub, stub;

  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	/* Tag the stub with the recorded source line for the debugger.  */
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* Strip a '*' (no-prefix marker) or prepend the '_' that
	   user-level symbols carry in assembly.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));

	/* Long branch: load the target into r12 and branch via CTR.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

  /* Reset the list for the next translation unit / function batch.  */
  stub_list = 0;
9746 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
9747 already there or not. */
9750 no_previous_def (function_name)
9751 tree function_name;
9753 tree stub;
9754 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
9755 if (function_name == STUB_FUNCTION_NAME (stub))
9756 return 0;
9757 return 1;
9760 /* GET_PREV_LABEL gets the label name from the previous definition of
9761 the function. */
9763 tree
9764 get_prev_label (function_name)
9765 tree function_name;
9767 tree stub;
9768 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
9769 if (function_name == STUB_FUNCTION_NAME (stub))
9770 return STUB_LABEL_NAME (stub);
9771 return 0;
9774 /* INSN is either a function call or a millicode call. It may have an
9775 unconditional jump in its delay slot.
9777 CALL_DEST is the routine we are calling. */
9779 char *
9780 output_call (insn, call_dest, operand_number)
9781 rtx insn;
9782 rtx call_dest;
9783 int operand_number;
9785 static char buf[256];
9786 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
9788 tree labelname;
9789 tree funname = get_identifier (XSTR (call_dest, 0));
9791 if (no_previous_def (funname))
9793 int line_number;
9794 rtx label_rtx = gen_label_rtx ();
9795 char *label_buf, temp_buf[256];
9796 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
9797 CODE_LABEL_NUMBER (label_rtx));
9798 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
9799 labelname = get_identifier (label_buf);
9800 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
9801 if (insn)
9802 line_number = NOTE_LINE_NUMBER (insn);
9803 add_compiler_stub (labelname, funname, line_number);
9805 else
9806 labelname = get_prev_label (funname);
9808 sprintf (buf, "jbsr %%z%d,%.246s",
9809 operand_number, IDENTIFIER_POINTER (labelname));
9810 return buf;
9812 else
9814 sprintf (buf, "bl %%z%d", operand_number);
9815 return buf;
9819 #endif /* RS6000_LONG_BRANCH */
/* Build in BUF a local label of the form "L<N>$<SYMBOL>".  If SYMBOL
   already starts with a quote, the label is placed inside the existing
   quoting; if SYMBOL needs quoting (per name_needs_quotes), the whole
   label is wrapped in quotes.  LENGTH is unused here — kept for
   symmetry with the companion GEN_*_NAME_FOR_SYMBOL macros; callers
   size BUF as LENGTH + 32.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
/* Generate PIC and indirect symbol stubs.  Emits to FILE a Darwin
   symbol stub named STUB for symbol SYMB, followed by the matching
   lazy-pointer entry.  Only the -fpic level 2 (flag_pic == 2) form is
   implemented; otherwise a "non-pure not supported" marker is
   emitted.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  STRIP_NAME_ENCODING (symb, symb);

  /* NOTE(review): label is incremented but not otherwise used in the
     visible body — presumably historical; confirm before removing.  */
  label += 1;

  /* Each decorated name gets 32 bytes of headroom beyond the symbol
     itself, matching what the GEN_* macros can append.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  /* Pick the stub section matching the PIC level.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Materialize the address of the lazy pointer pc-relatively
	 (bcl/mflr), load the target through it, and branch via CTR.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer, initially pointing at the dyld binding
     helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff the CONST_INT X lies in [-0x4000, 0x3fff], i.e. fits a
   (conservative) signed 16-bit displacement.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x4000) < 0x8000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch pseudo unless the caller supplied one; new
     pseudos may not be created during or after reload.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* (const (plus pic_offset_table_rtx ...)) is already
	 position-independent — return it unchanged.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize both operands of the PLUS recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    /* Large offset during reload: no way to get a scratch.  */
	    abort ();
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
  /* Intentionally empty — see comment above.  */
}
9961 #endif /* TARGET_MACHO */
9963 #if TARGET_ELF
9964 static unsigned int
9965 rs6000_elf_section_type_flags (decl, name, reloc)
9966 tree decl;
9967 const char *name;
9968 int reloc;
9970 unsigned int flags = default_section_type_flags (decl, name, reloc);
9972 if (TARGET_RELOCATABLE)
9973 flags |= SECTION_WRITE;
9975 return flags;
9978 /* Record an element in the table of global constructors. SYMBOL is
9979 a SYMBOL_REF of the function to be called; PRIORITY is a number
9980 between 0 and MAX_INIT_PRIORITY.
9982 This differs from default_named_section_asm_out_constructor in
9983 that we have special handling for -mrelocatable. */
9985 static void
9986 rs6000_elf_asm_out_constructor (symbol, priority)
9987 rtx symbol;
9988 int priority;
9990 const char *section = ".ctors";
9991 char buf[16];
9993 if (priority != DEFAULT_INIT_PRIORITY)
9995 sprintf (buf, ".ctors.%.5u",
9996 /* Invert the numbering so the linker puts us in the proper
9997 order; constructors are run from right to left, and the
9998 linker sorts in increasing order. */
9999 MAX_INIT_PRIORITY - priority);
10000 section = buf;
10003 named_section_flags (section, SECTION_WRITE);
10004 assemble_align (POINTER_SIZE);
10006 if (TARGET_RELOCATABLE)
10008 fputs ("\t.long (", asm_out_file);
10009 output_addr_const (asm_out_file, symbol);
10010 fputs (")@fixup\n", asm_out_file);
10012 else
10013 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10016 static void
10017 rs6000_elf_asm_out_destructor (symbol, priority)
10018 rtx symbol;
10019 int priority;
10021 const char *section = ".dtors";
10022 char buf[16];
10024 if (priority != DEFAULT_INIT_PRIORITY)
10026 sprintf (buf, ".dtors.%.5u",
10027 /* Invert the numbering so the linker puts us in the proper
10028 order; constructors are run from right to left, and the
10029 linker sorts in increasing order. */
10030 MAX_INIT_PRIORITY - priority);
10031 section = buf;
10034 named_section_flags (section, SECTION_WRITE);
10035 assemble_align (POINTER_SIZE);
10037 if (TARGET_RELOCATABLE)
10039 fputs ("\t.long (", asm_out_file);
10040 output_addr_const (asm_out_file, symbol);
10041 fputs (")@fixup\n", asm_out_file);
10043 else
10044 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10046 #endif
10048 #ifdef OBJECT_FORMAT_COFF
10049 static void
10050 xcoff_asm_named_section (name, flags)
10051 const char *name;
10052 unsigned int flags ATTRIBUTE_UNUSED;
10054 fprintf (asm_out_file, "\t.csect %s\n", name);
10056 #endif