2003-03-10 Aldy Hernandez <aldyh@redhat.com>
Source: [official-gcc.git] gcc/config/rs6000/rs6000.c
Git blob: 91c91ad7ba4a9015f09539a937a183f8c78e8ec2
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
/* Default TARGET_NO_PROTOTYPE to 0 if the subtarget has not defined it.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* NOTE(review): these macros evaluate each argument twice, so callers
   must not pass expressions with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
61 /* Target cpu type */
63 enum processor_type rs6000_cpu;
64 struct rs6000_cpu_select rs6000_select[3] =
66 /* switch name, tune arch */
67 { (const char *)0, "--with-cpu=", 1, 1 },
68 { (const char *)0, "-mcpu=", 1, 1 },
69 { (const char *)0, "-mtune=", 1, 0 },
72 /* Size of long double */
73 const char *rs6000_long_double_size_string;
74 int rs6000_long_double_type_size;
76 /* Whether -mabi=altivec has appeared */
77 int rs6000_altivec_abi;
79 /* Whether VRSAVE instructions should be generated. */
80 int rs6000_altivec_vrsave;
82 /* String from -mvrsave= option. */
83 const char *rs6000_altivec_vrsave_string;
85 /* Nonzero if we want SPE ABI extensions. */
86 int rs6000_spe_abi;
88 /* Whether isel instructions should be generated. */
89 int rs6000_isel;
91 /* Nonzero if we have FPRs. */
92 int rs6000_fprs = 1;
94 /* String from -misel=. */
95 const char *rs6000_isel_string;
97 /* Set to nonzero once AIX common-mode calls have been defined. */
98 static GTY(()) int common_mode_defined;
100 /* Private copy of original value of flag_pic for ABI_AIX. */
101 static int rs6000_flag_pic;
103 /* Save information from a "cmpxx" operation until the branch or scc is
104 emitted. */
105 rtx rs6000_compare_op0, rs6000_compare_op1;
106 int rs6000_compare_fp_p;
108 /* Label number of label created for -mrelocatable, to call to so we can
109 get the address of the GOT section */
110 int rs6000_pic_labelno;
112 #ifdef USING_ELFOS_H
113 /* Which abi to adhere to */
114 const char *rs6000_abi_name = RS6000_ABI_NAME;
116 /* Semantics of the small data area */
117 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
119 /* Which small data model to use */
120 const char *rs6000_sdata_name = (char *)0;
122 /* Counter for labels which are to be placed in .fixup. */
123 int fixuplabelno = 0;
124 #endif
126 /* ABI enumeration available for subtarget to use. */
127 enum rs6000_abi rs6000_current_abi;
129 /* ABI string from -mabi= option. */
130 const char *rs6000_abi_string;
132 /* Debug flags */
133 const char *rs6000_debug_name;
134 int rs6000_debug_stack; /* debug stack applications */
135 int rs6000_debug_arg; /* debug argument handling */
137 const char *rs6000_traceback_name;
138 static enum {
139 traceback_default = 0,
140 traceback_none,
141 traceback_part,
142 traceback_full
143 } rs6000_traceback;
145 /* Flag to say the TOC is initialized */
146 int toc_initialized;
147 char toc_label_name[10];
149 /* Alias set for saves and restores from the rs6000 stack. */
150 static int rs6000_sr_alias_set;
152 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
153 The only place that looks at this is rs6000_set_default_type_attributes;
154 everywhere else should rely on the presence or absence of a longcall
155 attribute on the function declaration. */
156 int rs6000_default_long_calls;
157 const char *rs6000_longcall_switch;
159 struct builtin_description
161 /* mask is not const because we're going to alter it below. This
162 nonsense will go away when we rewrite the -march infrastructure
163 to give us more target flag bits. */
164 unsigned int mask;
165 const enum insn_code icode;
166 const char *const name;
167 const enum rs6000_builtins code;
/* Forward declarations for the static helpers defined later in this
   file (target hooks, frame/prologue machinery, builtin expanders and
   option parsers).  */
static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
static void validate_condition_mode
  PARAMS ((enum rtx_code, enum machine_mode));
static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
static void rs6000_maybe_dead PARAMS ((rtx));
static void rs6000_emit_stack_tie PARAMS ((void));
static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
static rtx spe_synthesize_frame_save PARAMS ((rtx));
static bool spe_func_has_64bit_regs_p PARAMS ((void));
static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
				     unsigned int, int, int));
static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
static unsigned rs6000_hash_constant PARAMS ((rtx));
static unsigned toc_hash_function PARAMS ((const void *));
static int toc_hash_eq PARAMS ((const void *, const void *));
static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
static struct machine_function * rs6000_init_machine_status PARAMS ((void));
static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility PARAMS ((tree, int));
#endif
static int rs6000_ra_ever_killed PARAMS ((void));
static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
/* Tentative declaration; the table itself is defined further down.  */
const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes PARAMS ((tree));
static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
					    HOST_WIDE_INT, tree));
static rtx rs6000_emit_set_long_const PARAMS ((rtx,
					       HOST_WIDE_INT, HOST_WIDE_INT));
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
							   int));
static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
static void rs6000_elf_select_section PARAMS ((tree, int,
					       unsigned HOST_WIDE_INT));
static void rs6000_elf_unique_section PARAMS ((tree, int));
static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
						   unsigned HOST_WIDE_INT));
static void rs6000_elf_encode_section_info PARAMS ((tree, int))
     ATTRIBUTE_UNUSED;
static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
static bool rs6000_elf_in_small_data_p PARAMS ((tree));
#endif
#if TARGET_XCOFF
static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
static void rs6000_xcoff_select_section PARAMS ((tree, int,
						 unsigned HOST_WIDE_INT));
static void rs6000_xcoff_unique_section PARAMS ((tree, int));
static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
						     unsigned HOST_WIDE_INT));
static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
#endif
static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
     ATTRIBUTE_UNUSED;
static bool rs6000_binds_local_p PARAMS ((tree));
static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int rs6000_adjust_priority PARAMS ((rtx, int));
static int rs6000_issue_rate PARAMS ((void));

static void rs6000_init_builtins PARAMS ((void));
static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
static void altivec_init_builtins PARAMS ((void));
static void rs6000_common_init_builtins PARAMS ((void));

static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
					      int, enum rs6000_builtins,
					      enum rs6000_builtins));
static void spe_init_builtins PARAMS ((void));
static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));

static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
static void rs6000_parse_abi_options PARAMS ((void));
static void rs6000_parse_vrsave_option PARAMS ((void));
static void rs6000_parse_isel_option PARAMS ((void));
static int first_altivec_reg_to_save PARAMS ((void));
static unsigned int compute_vrsave_mask PARAMS ((void));
static void is_altivec_return_reg PARAMS ((rtx, void *));
static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
static int easy_vector_constant PARAMS ((rtx));
static bool is_ev64_opaque_type PARAMS ((tree));
static rtx rs6000_dwarf_register_span PARAMS ((rtx));
275 /* Hash table stuff for keeping track of TOC entries. */
277 struct toc_hash_struct GTY(())
279 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
280 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
281 rtx key;
282 enum machine_mode key_mode;
283 int labelno;
286 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
/* Default register names: GPRs, FPRs, mq/lr/ctr/ap, CR fields, xer,
   AltiVec registers plus vrsave/vscr, and the SPE accumulator and
   status registers, in REGNO order.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
#ifdef TARGET_REGNAMES
/* Alternate (%-prefixed) register names, copied over rs6000_reg_names
   by rs6000_override_options when -mregnames is in effect.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
  "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
/* Default MASK_STRICT_ALIGN to 0 if the subtarget does not define it.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

/* Scheduler hooks.  */
#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

/* Builtin hooks.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

struct gcc_target targetm = TARGET_INITIALIZER;
432 /* Override command line options. Mostly we process the processor
433 type and sometimes adjust other TARGET_ options. */
435 void
436 rs6000_override_options (default_cpu)
437 const char *default_cpu;
439 size_t i, j;
440 struct rs6000_cpu_select *ptr;
442 /* Simplify the entries below by making a mask for any POWER
443 variant and any PowerPC variant. */
445 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
446 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
447 | MASK_PPC_GFXOPT | MASK_POWERPC64)
448 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
450 static struct ptt
452 const char *const name; /* Canonical processor name. */
453 const enum processor_type processor; /* Processor type enum value. */
454 const int target_enable; /* Target flags to enable. */
455 const int target_disable; /* Target flags to disable. */
456 } const processor_target_table[]
457 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
458 POWER_MASKS | POWERPC_MASKS},
459 {"power", PROCESSOR_POWER,
460 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
461 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
462 {"power2", PROCESSOR_POWER,
463 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
464 POWERPC_MASKS | MASK_NEW_MNEMONICS},
465 {"power3", PROCESSOR_PPC630,
466 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
467 POWER_MASKS},
468 {"power4", PROCESSOR_POWER4,
469 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
470 POWER_MASKS | MASK_PPC_GPOPT},
471 {"powerpc", PROCESSOR_POWERPC,
472 MASK_POWERPC | MASK_NEW_MNEMONICS,
473 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
474 {"powerpc64", PROCESSOR_POWERPC64,
475 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
476 POWER_MASKS | POWERPC_OPT_MASKS},
477 {"rios", PROCESSOR_RIOS1,
478 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
479 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
480 {"rios1", PROCESSOR_RIOS1,
481 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
482 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
483 {"rsc", PROCESSOR_PPC601,
484 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
485 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
486 {"rsc1", PROCESSOR_PPC601,
487 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
488 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
489 {"rios2", PROCESSOR_RIOS2,
490 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
491 POWERPC_MASKS | MASK_NEW_MNEMONICS},
492 {"rs64a", PROCESSOR_RS64A,
493 MASK_POWERPC | MASK_NEW_MNEMONICS,
494 POWER_MASKS | POWERPC_OPT_MASKS},
495 {"401", PROCESSOR_PPC403,
496 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
497 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
498 {"403", PROCESSOR_PPC403,
499 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
500 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
501 {"405", PROCESSOR_PPC405,
502 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
503 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
504 {"405f", PROCESSOR_PPC405,
505 MASK_POWERPC | MASK_NEW_MNEMONICS,
506 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
507 {"505", PROCESSOR_MPCCORE,
508 MASK_POWERPC | MASK_NEW_MNEMONICS,
509 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
510 {"601", PROCESSOR_PPC601,
511 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
512 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
513 {"602", PROCESSOR_PPC603,
514 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
515 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
516 {"603", PROCESSOR_PPC603,
517 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
518 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
519 {"603e", PROCESSOR_PPC603,
520 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
521 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
522 {"ec603e", PROCESSOR_PPC603,
523 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
524 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
525 {"604", PROCESSOR_PPC604,
526 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
527 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
528 {"604e", PROCESSOR_PPC604e,
529 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
530 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
531 {"620", PROCESSOR_PPC620,
532 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
533 POWER_MASKS},
534 {"630", PROCESSOR_PPC630,
535 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
536 POWER_MASKS},
537 {"740", PROCESSOR_PPC750,
538 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
539 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
540 {"750", PROCESSOR_PPC750,
541 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
542 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
543 {"7400", PROCESSOR_PPC7400,
544 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
545 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
546 {"7450", PROCESSOR_PPC7450,
547 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
548 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
549 {"8540", PROCESSOR_PPC8540,
550 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
551 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
552 {"801", PROCESSOR_MPCCORE,
553 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
554 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
555 {"821", PROCESSOR_MPCCORE,
556 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
557 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
558 {"823", PROCESSOR_MPCCORE,
559 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
560 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
561 {"860", PROCESSOR_MPCCORE,
562 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
563 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
565 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
567 /* Save current -mmultiple/-mno-multiple status. */
568 int multiple = TARGET_MULTIPLE;
569 /* Save current -mstring/-mno-string status. */
570 int string = TARGET_STRING;
572 /* Identify the processor type. */
573 rs6000_select[0].string = default_cpu;
574 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
576 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
578 ptr = &rs6000_select[i];
579 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
581 for (j = 0; j < ptt_size; j++)
582 if (! strcmp (ptr->string, processor_target_table[j].name))
584 if (ptr->set_tune_p)
585 rs6000_cpu = processor_target_table[j].processor;
587 if (ptr->set_arch_p)
589 target_flags |= processor_target_table[j].target_enable;
590 target_flags &= ~processor_target_table[j].target_disable;
592 break;
595 if (j == ptt_size)
596 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
600 if (rs6000_cpu == PROCESSOR_PPC8540)
601 rs6000_isel = 1;
603 /* If we are optimizing big endian systems for space, use the load/store
604 multiple and string instructions. */
605 if (BYTES_BIG_ENDIAN && optimize_size)
606 target_flags |= MASK_MULTIPLE | MASK_STRING;
608 /* If -mmultiple or -mno-multiple was explicitly used, don't
609 override with the processor default */
610 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
611 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
613 /* If -mstring or -mno-string was explicitly used, don't override
614 with the processor default. */
615 if ((target_flags_explicit & MASK_STRING) != 0)
616 target_flags = (target_flags & ~MASK_STRING) | string;
618 /* Don't allow -mmultiple or -mstring on little endian systems
619 unless the cpu is a 750, because the hardware doesn't support the
620 instructions used in little endian mode, and causes an alignment
621 trap. The 750 does not cause an alignment trap (except when the
622 target is unaligned). */
624 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
626 if (TARGET_MULTIPLE)
628 target_flags &= ~MASK_MULTIPLE;
629 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
630 warning ("-mmultiple is not supported on little endian systems");
633 if (TARGET_STRING)
635 target_flags &= ~MASK_STRING;
636 if ((target_flags_explicit & MASK_STRING) != 0)
637 warning ("-mstring is not supported on little endian systems");
641 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
643 rs6000_flag_pic = flag_pic;
644 flag_pic = 0;
647 /* For Darwin, always silently make -fpic and -fPIC identical. */
648 if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
649 flag_pic = 2;
651 /* Set debug flags */
652 if (rs6000_debug_name)
654 if (! strcmp (rs6000_debug_name, "all"))
655 rs6000_debug_stack = rs6000_debug_arg = 1;
656 else if (! strcmp (rs6000_debug_name, "stack"))
657 rs6000_debug_stack = 1;
658 else if (! strcmp (rs6000_debug_name, "arg"))
659 rs6000_debug_arg = 1;
660 else
661 error ("unknown -mdebug-%s switch", rs6000_debug_name);
664 if (rs6000_traceback_name)
666 if (! strncmp (rs6000_traceback_name, "full", 4))
667 rs6000_traceback = traceback_full;
668 else if (! strncmp (rs6000_traceback_name, "part", 4))
669 rs6000_traceback = traceback_part;
670 else if (! strncmp (rs6000_traceback_name, "no", 2))
671 rs6000_traceback = traceback_none;
672 else
673 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
674 rs6000_traceback_name);
677 /* Set size of long double */
678 rs6000_long_double_type_size = 64;
679 if (rs6000_long_double_size_string)
681 char *tail;
682 int size = strtol (rs6000_long_double_size_string, &tail, 10);
683 if (*tail != '\0' || (size != 64 && size != 128))
684 error ("Unknown switch -mlong-double-%s",
685 rs6000_long_double_size_string);
686 else
687 rs6000_long_double_type_size = size;
690 /* Handle -mabi= options. */
691 rs6000_parse_abi_options ();
693 /* Handle -mvrsave= option. */
694 rs6000_parse_vrsave_option ();
696 /* Handle -misel= option. */
697 rs6000_parse_isel_option ();
699 #ifdef SUBTARGET_OVERRIDE_OPTIONS
700 SUBTARGET_OVERRIDE_OPTIONS;
701 #endif
702 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
703 SUBSUBTARGET_OVERRIDE_OPTIONS;
704 #endif
706 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
707 using TARGET_OPTIONS to handle a toggle switch, but we're out of
708 bits in target_flags so TARGET_SWITCHES cannot be used.
709 Assumption here is that rs6000_longcall_switch points into the
710 text of the complete option, rather than being a copy, so we can
711 scan back for the presence or absence of the no- modifier. */
712 if (rs6000_longcall_switch)
714 const char *base = rs6000_longcall_switch;
715 while (base[-1] != 'm') base--;
717 if (*rs6000_longcall_switch != '\0')
718 error ("invalid option `%s'", base);
719 rs6000_default_long_calls = (base[0] != 'n');
722 #ifdef TARGET_REGNAMES
723 /* If the user desires alternate register names, copy in the
724 alternate names now. */
725 if (TARGET_REGNAMES)
726 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
727 #endif
729 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
730 If -maix-struct-return or -msvr4-struct-return was explicitly
731 used, don't override with the ABI default. */
732 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
734 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
735 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
736 else
737 target_flags |= MASK_AIX_STRUCT_RET;
740 if (TARGET_LONG_DOUBLE_128
741 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
742 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
744 /* Allocate an alias set for register saves & restores from stack. */
745 rs6000_sr_alias_set = new_alias_set ();
747 if (TARGET_TOC)
748 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
750 /* We can only guarantee the availability of DI pseudo-ops when
751 assembling for 64-bit targets. */
752 if (!TARGET_64BIT)
754 targetm.asm_out.aligned_op.di = NULL;
755 targetm.asm_out.unaligned_op.di = NULL;
758 /* Set maximum branch target alignment at two instructions, eight bytes. */
759 align_jumps_max_skip = 8;
760 align_loops_max_skip = 8;
762 /* Arrange to save and restore machine status around nested functions. */
763 init_machine_status = rs6000_init_machine_status;
766 /* Handle -misel= option. */
767 static void
768 rs6000_parse_isel_option ()
770 if (rs6000_isel_string == 0)
771 return;
772 else if (! strcmp (rs6000_isel_string, "yes"))
773 rs6000_isel = 1;
774 else if (! strcmp (rs6000_isel_string, "no"))
775 rs6000_isel = 0;
776 else
777 error ("unknown -misel= option specified: '%s'",
778 rs6000_isel_string);
781 /* Handle -mvrsave= options. */
782 static void
783 rs6000_parse_vrsave_option ()
785 /* Generate VRSAVE instructions by default. */
786 if (rs6000_altivec_vrsave_string == 0
787 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
788 rs6000_altivec_vrsave = 1;
789 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
790 rs6000_altivec_vrsave = 0;
791 else
792 error ("unknown -mvrsave= option specified: '%s'",
793 rs6000_altivec_vrsave_string);
796 /* Handle -mabi= options. */
797 static void
798 rs6000_parse_abi_options ()
800 if (rs6000_abi_string == 0)
801 return;
802 else if (! strcmp (rs6000_abi_string, "altivec"))
803 rs6000_altivec_abi = 1;
804 else if (! strcmp (rs6000_abi_string, "no-altivec"))
805 rs6000_altivec_abi = 0;
806 else if (! strcmp (rs6000_abi_string, "spe"))
808 rs6000_spe_abi = 1;
809 if (!TARGET_SPE_ABI)
810 error ("not configured for ABI: '%s'", rs6000_abi_string);
813 else if (! strcmp (rs6000_abi_string, "no-spe"))
814 rs6000_spe_abi = 0;
815 else
816 error ("unknown ABI specified: '%s'", rs6000_abi_string);
819 void
820 optimization_options (level, size)
821 int level ATTRIBUTE_UNUSED;
822 int size ATTRIBUTE_UNUSED;
826 /* Do anything needed at the start of the asm file. */
828 void
829 rs6000_file_start (file, default_cpu)
830 FILE *file;
831 const char *default_cpu;
833 size_t i;
834 char buffer[80];
835 const char *start = buffer;
836 struct rs6000_cpu_select *ptr;
838 if (flag_verbose_asm)
840 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
841 rs6000_select[0].string = default_cpu;
843 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
845 ptr = &rs6000_select[i];
846 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
848 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
849 start = "";
853 #ifdef USING_ELFOS_H
854 switch (rs6000_sdata)
856 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
857 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
858 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
859 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
862 if (rs6000_sdata && g_switch_value)
864 fprintf (file, "%s -G %d", start, g_switch_value);
865 start = "";
867 #endif
869 if (*start == '\0')
870 putc ('\n', file);
874 /* Return nonzero if this function is known to have a null epilogue. */
877 direct_return ()
879 if (reload_completed)
881 rs6000_stack_t *info = rs6000_stack_info ();
883 if (info->first_gp_reg_save == 32
884 && info->first_fp_reg_save == 64
885 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
886 && ! info->lr_save_p
887 && ! info->cr_save_p
888 && info->vrsave_mask == 0
889 && ! info->push_p)
890 return 1;
893 return 0;
896 /* Returns 1 always. */
899 any_operand (op, mode)
900 rtx op ATTRIBUTE_UNUSED;
901 enum machine_mode mode ATTRIBUTE_UNUSED;
903 return 1;
906 /* Returns 1 if op is the count register. */
908 count_register_operand (op, mode)
909 rtx op;
910 enum machine_mode mode ATTRIBUTE_UNUSED;
912 if (GET_CODE (op) != REG)
913 return 0;
915 if (REGNO (op) == COUNT_REGISTER_REGNUM)
916 return 1;
918 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
919 return 1;
921 return 0;
924 /* Returns 1 if op is an altivec register. */
926 altivec_register_operand (op, mode)
927 rtx op;
928 enum machine_mode mode ATTRIBUTE_UNUSED;
931 return (register_operand (op, mode)
932 && (GET_CODE (op) != REG
933 || REGNO (op) > FIRST_PSEUDO_REGISTER
934 || ALTIVEC_REGNO_P (REGNO (op))));
938 xer_operand (op, mode)
939 rtx op;
940 enum machine_mode mode ATTRIBUTE_UNUSED;
942 if (GET_CODE (op) != REG)
943 return 0;
945 if (XER_REGNO_P (REGNO (op)))
946 return 1;
948 return 0;
951 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
952 by such constants completes more quickly. */
955 s8bit_cint_operand (op, mode)
956 rtx op;
957 enum machine_mode mode ATTRIBUTE_UNUSED;
959 return ( GET_CODE (op) == CONST_INT
960 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
963 /* Return 1 if OP is a constant that can fit in a D field. */
966 short_cint_operand (op, mode)
967 rtx op;
968 enum machine_mode mode ATTRIBUTE_UNUSED;
970 return (GET_CODE (op) == CONST_INT
971 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
974 /* Similar for an unsigned D field. */
977 u_short_cint_operand (op, mode)
978 rtx op;
979 enum machine_mode mode ATTRIBUTE_UNUSED;
981 return (GET_CODE (op) == CONST_INT
982 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
985 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
988 non_short_cint_operand (op, mode)
989 rtx op;
990 enum machine_mode mode ATTRIBUTE_UNUSED;
992 return (GET_CODE (op) == CONST_INT
993 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
996 /* Returns 1 if OP is a CONST_INT that is a positive value
997 and an exact power of 2. */
1000 exact_log2_cint_operand (op, mode)
1001 rtx op;
1002 enum machine_mode mode ATTRIBUTE_UNUSED;
1004 return (GET_CODE (op) == CONST_INT
1005 && INTVAL (op) > 0
1006 && exact_log2 (INTVAL (op)) >= 0);
1009 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1010 ctr, or lr). */
1013 gpc_reg_operand (op, mode)
1014 rtx op;
1015 enum machine_mode mode;
1017 return (register_operand (op, mode)
1018 && (GET_CODE (op) != REG
1019 || (REGNO (op) >= ARG_POINTER_REGNUM
1020 && !XER_REGNO_P (REGNO (op)))
1021 || REGNO (op) < MQ_REGNO));
1024 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1025 CR field. */
1028 cc_reg_operand (op, mode)
1029 rtx op;
1030 enum machine_mode mode;
1032 return (register_operand (op, mode)
1033 && (GET_CODE (op) != REG
1034 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1035 || CR_REGNO_P (REGNO (op))));
1038 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1039 CR field that isn't CR0. */
1042 cc_reg_not_cr0_operand (op, mode)
1043 rtx op;
1044 enum machine_mode mode;
1046 return (register_operand (op, mode)
1047 && (GET_CODE (op) != REG
1048 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1049 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1052 /* Returns 1 if OP is either a constant integer valid for a D-field or
1053 a non-special register. If a register, it must be in the proper
1054 mode unless MODE is VOIDmode. */
1057 reg_or_short_operand (op, mode)
1058 rtx op;
1059 enum machine_mode mode;
1061 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1064 /* Similar, except check if the negation of the constant would be
1065 valid for a D-field. */
1068 reg_or_neg_short_operand (op, mode)
1069 rtx op;
1070 enum machine_mode mode;
1072 if (GET_CODE (op) == CONST_INT)
1073 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1075 return gpc_reg_operand (op, mode);
1078 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1079 a non-special register. If a register, it must be in the proper
1080 mode unless MODE is VOIDmode. */
1083 reg_or_aligned_short_operand (op, mode)
1084 rtx op;
1085 enum machine_mode mode;
1087 if (gpc_reg_operand (op, mode))
1088 return 1;
1089 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1090 return 1;
1092 return 0;
1096 /* Return 1 if the operand is either a register or an integer whose
1097 high-order 16 bits are zero. */
1100 reg_or_u_short_operand (op, mode)
1101 rtx op;
1102 enum machine_mode mode;
1104 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1107 /* Return 1 is the operand is either a non-special register or ANY
1108 constant integer. */
1111 reg_or_cint_operand (op, mode)
1112 rtx op;
1113 enum machine_mode mode;
1115 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1118 /* Return 1 is the operand is either a non-special register or ANY
1119 32-bit signed constant integer. */
1122 reg_or_arith_cint_operand (op, mode)
1123 rtx op;
1124 enum machine_mode mode;
1126 return (gpc_reg_operand (op, mode)
1127 || (GET_CODE (op) == CONST_INT
1128 #if HOST_BITS_PER_WIDE_INT != 32
1129 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1130 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1131 #endif
1135 /* Return 1 is the operand is either a non-special register or a 32-bit
1136 signed constant integer valid for 64-bit addition. */
1139 reg_or_add_cint64_operand (op, mode)
1140 rtx op;
1141 enum machine_mode mode;
1143 return (gpc_reg_operand (op, mode)
1144 || (GET_CODE (op) == CONST_INT
1145 #if HOST_BITS_PER_WIDE_INT == 32
1146 && INTVAL (op) < 0x7fff8000
1147 #else
1148 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1149 < 0x100000000ll)
1150 #endif
1154 /* Return 1 is the operand is either a non-special register or a 32-bit
1155 signed constant integer valid for 64-bit subtraction. */
1158 reg_or_sub_cint64_operand (op, mode)
1159 rtx op;
1160 enum machine_mode mode;
1162 return (gpc_reg_operand (op, mode)
1163 || (GET_CODE (op) == CONST_INT
1164 #if HOST_BITS_PER_WIDE_INT == 32
1165 && (- INTVAL (op)) < 0x7fff8000
1166 #else
1167 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1168 < 0x100000000ll)
1169 #endif
1173 /* Return 1 is the operand is either a non-special register or ANY
1174 32-bit unsigned constant integer. */
1177 reg_or_logical_cint_operand (op, mode)
1178 rtx op;
1179 enum machine_mode mode;
1181 if (GET_CODE (op) == CONST_INT)
1183 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1185 if (GET_MODE_BITSIZE (mode) <= 32)
1186 abort ();
1188 if (INTVAL (op) < 0)
1189 return 0;
1192 return ((INTVAL (op) & GET_MODE_MASK (mode)
1193 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1195 else if (GET_CODE (op) == CONST_DOUBLE)
1197 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1198 || mode != DImode)
1199 abort ();
1201 return CONST_DOUBLE_HIGH (op) == 0;
1203 else
1204 return gpc_reg_operand (op, mode);
1207 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1210 got_operand (op, mode)
1211 rtx op;
1212 enum machine_mode mode ATTRIBUTE_UNUSED;
1214 return (GET_CODE (op) == SYMBOL_REF
1215 || GET_CODE (op) == CONST
1216 || GET_CODE (op) == LABEL_REF);
1219 /* Return 1 if the operand is a simple references that can be loaded via
1220 the GOT (labels involving addition aren't allowed). */
1223 got_no_const_operand (op, mode)
1224 rtx op;
1225 enum machine_mode mode ATTRIBUTE_UNUSED;
1227 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1230 /* Return the number of instructions it takes to form a constant in an
1231 integer register. */
1233 static int
1234 num_insns_constant_wide (value)
1235 HOST_WIDE_INT value;
1237 /* signed constant loadable with {cal|addi} */
1238 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1239 return 1;
1241 /* constant loadable with {cau|addis} */
1242 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1243 return 1;
1245 #if HOST_BITS_PER_WIDE_INT == 64
1246 else if (TARGET_POWERPC64)
1248 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1249 HOST_WIDE_INT high = value >> 31;
1251 if (high == 0 || high == -1)
1252 return 2;
1254 high >>= 1;
1256 if (low == 0)
1257 return num_insns_constant_wide (high) + 1;
1258 else
1259 return (num_insns_constant_wide (high)
1260 + num_insns_constant_wide (low) + 1);
1262 #endif
1264 else
1265 return 2;
1269 num_insns_constant (op, mode)
1270 rtx op;
1271 enum machine_mode mode;
1273 if (GET_CODE (op) == CONST_INT)
1275 #if HOST_BITS_PER_WIDE_INT == 64
1276 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1277 && mask64_operand (op, mode))
1278 return 2;
1279 else
1280 #endif
1281 return num_insns_constant_wide (INTVAL (op));
1284 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1286 long l;
1287 REAL_VALUE_TYPE rv;
1289 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1290 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1291 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1294 else if (GET_CODE (op) == CONST_DOUBLE)
1296 HOST_WIDE_INT low;
1297 HOST_WIDE_INT high;
1298 long l[2];
1299 REAL_VALUE_TYPE rv;
1300 int endian = (WORDS_BIG_ENDIAN == 0);
1302 if (mode == VOIDmode || mode == DImode)
1304 high = CONST_DOUBLE_HIGH (op);
1305 low = CONST_DOUBLE_LOW (op);
1307 else
1309 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1310 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1311 high = l[endian];
1312 low = l[1 - endian];
1315 if (TARGET_32BIT)
1316 return (num_insns_constant_wide (low)
1317 + num_insns_constant_wide (high));
1319 else
1321 if (high == 0 && low >= 0)
1322 return num_insns_constant_wide (low);
1324 else if (high == -1 && low < 0)
1325 return num_insns_constant_wide (low);
1327 else if (mask64_operand (op, mode))
1328 return 2;
1330 else if (low == 0)
1331 return num_insns_constant_wide (high) + 1;
1333 else
1334 return (num_insns_constant_wide (high)
1335 + num_insns_constant_wide (low) + 1);
1339 else
1340 abort ();
1343 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1344 register with one instruction per word. We only do this if we can
1345 safely read CONST_DOUBLE_{LOW,HIGH}. */
1348 easy_fp_constant (op, mode)
1349 rtx op;
1350 enum machine_mode mode;
1352 if (GET_CODE (op) != CONST_DOUBLE
1353 || GET_MODE (op) != mode
1354 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1355 return 0;
1357 /* Consider all constants with -msoft-float to be easy. */
1358 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1359 && mode != DImode)
1360 return 1;
1362 /* If we are using V.4 style PIC, consider all constants to be hard. */
1363 if (flag_pic && DEFAULT_ABI == ABI_V4)
1364 return 0;
1366 #ifdef TARGET_RELOCATABLE
1367 /* Similarly if we are using -mrelocatable, consider all constants
1368 to be hard. */
1369 if (TARGET_RELOCATABLE)
1370 return 0;
1371 #endif
1373 if (mode == TFmode)
1375 long k[4];
1376 REAL_VALUE_TYPE rv;
1378 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1379 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1381 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1382 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1383 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1384 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1387 else if (mode == DFmode)
1389 long k[2];
1390 REAL_VALUE_TYPE rv;
1392 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1393 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1395 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1396 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1399 else if (mode == SFmode)
1401 long l;
1402 REAL_VALUE_TYPE rv;
1404 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1405 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1407 return num_insns_constant_wide (l) == 1;
1410 else if (mode == DImode)
1411 return ((TARGET_POWERPC64
1412 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1413 || (num_insns_constant (op, DImode) <= 2));
1415 else if (mode == SImode)
1416 return 1;
1417 else
1418 abort ();
1421 /* Return 1 if the operand is a CONST_INT and can be put into a
1422 register with one instruction. */
1424 static int
1425 easy_vector_constant (op)
1426 rtx op;
1428 rtx elt;
1429 int units, i;
1431 if (GET_CODE (op) != CONST_VECTOR)
1432 return 0;
1434 units = CONST_VECTOR_NUNITS (op);
1436 /* We can generate 0 easily. Look for that. */
1437 for (i = 0; i < units; ++i)
1439 elt = CONST_VECTOR_ELT (op, i);
1441 /* We could probably simplify this by just checking for equality
1442 with CONST0_RTX for the current mode, but let's be safe
1443 instead. */
1445 switch (GET_CODE (elt))
1447 case CONST_INT:
1448 if (INTVAL (elt) != 0)
1449 return 0;
1450 break;
1451 case CONST_DOUBLE:
1452 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1453 return 0;
1454 break;
1455 default:
1456 return 0;
1460 /* We could probably generate a few other constants trivially, but
1461 gcc doesn't generate them yet. FIXME later. */
1462 return 1;
1465 /* Return 1 if the operand is the constant 0. This works for scalars
1466 as well as vectors. */
1468 zero_constant (op, mode)
1469 rtx op;
1470 enum machine_mode mode;
1472 return op == CONST0_RTX (mode);
1475 /* Return 1 if the operand is 0.0. */
1477 zero_fp_constant (op, mode)
1478 rtx op;
1479 enum machine_mode mode;
1481 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1484 /* Return 1 if the operand is in volatile memory. Note that during
1485 the RTL generation phase, memory_operand does not return TRUE for
1486 volatile memory references. So this function allows us to
1487 recognize volatile references where its safe. */
1490 volatile_mem_operand (op, mode)
1491 rtx op;
1492 enum machine_mode mode;
1494 if (GET_CODE (op) != MEM)
1495 return 0;
1497 if (!MEM_VOLATILE_P (op))
1498 return 0;
1500 if (mode != GET_MODE (op))
1501 return 0;
1503 if (reload_completed)
1504 return memory_operand (op, mode);
1506 if (reload_in_progress)
1507 return strict_memory_address_p (mode, XEXP (op, 0));
1509 return memory_address_p (mode, XEXP (op, 0));
1512 /* Return 1 if the operand is an offsettable memory operand. */
1515 offsettable_mem_operand (op, mode)
1516 rtx op;
1517 enum machine_mode mode;
1519 return ((GET_CODE (op) == MEM)
1520 && offsettable_address_p (reload_completed || reload_in_progress,
1521 mode, XEXP (op, 0)));
1524 /* Return 1 if the operand is either an easy FP constant (see above) or
1525 memory. */
1528 mem_or_easy_const_operand (op, mode)
1529 rtx op;
1530 enum machine_mode mode;
1532 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1535 /* Return 1 if the operand is either a non-special register or an item
1536 that can be used as the operand of a `mode' add insn. */
1539 add_operand (op, mode)
1540 rtx op;
1541 enum machine_mode mode;
1543 if (GET_CODE (op) == CONST_INT)
1544 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1545 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1547 return gpc_reg_operand (op, mode);
1550 /* Return 1 if OP is a constant but not a valid add_operand. */
1553 non_add_cint_operand (op, mode)
1554 rtx op;
1555 enum machine_mode mode ATTRIBUTE_UNUSED;
1557 return (GET_CODE (op) == CONST_INT
1558 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1559 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1562 /* Return 1 if the operand is a non-special register or a constant that
1563 can be used as the operand of an OR or XOR insn on the RS/6000. */
1566 logical_operand (op, mode)
1567 rtx op;
1568 enum machine_mode mode;
1570 HOST_WIDE_INT opl, oph;
1572 if (gpc_reg_operand (op, mode))
1573 return 1;
1575 if (GET_CODE (op) == CONST_INT)
1577 opl = INTVAL (op) & GET_MODE_MASK (mode);
1579 #if HOST_BITS_PER_WIDE_INT <= 32
1580 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1581 return 0;
1582 #endif
1584 else if (GET_CODE (op) == CONST_DOUBLE)
1586 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1587 abort ();
1589 opl = CONST_DOUBLE_LOW (op);
1590 oph = CONST_DOUBLE_HIGH (op);
1591 if (oph != 0)
1592 return 0;
1594 else
1595 return 0;
1597 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1598 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1601 /* Return 1 if C is a constant that is not a logical operand (as
1602 above), but could be split into one. */
1605 non_logical_cint_operand (op, mode)
1606 rtx op;
1607 enum machine_mode mode;
1609 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1610 && ! logical_operand (op, mode)
1611 && reg_or_logical_cint_operand (op, mode));
1614 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1615 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1616 Reject all ones and all zeros, since these should have been optimized
1617 away and confuse the making of MB and ME. */
1620 mask_operand (op, mode)
1621 rtx op;
1622 enum machine_mode mode ATTRIBUTE_UNUSED;
1624 HOST_WIDE_INT c, lsb;
1626 if (GET_CODE (op) != CONST_INT)
1627 return 0;
1629 c = INTVAL (op);
1631 /* Fail in 64-bit mode if the mask wraps around because the upper
1632 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1633 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1634 return 0;
1636 /* We don't change the number of transitions by inverting,
1637 so make sure we start with the LS bit zero. */
1638 if (c & 1)
1639 c = ~c;
1641 /* Reject all zeros or all ones. */
1642 if (c == 0)
1643 return 0;
1645 /* Find the first transition. */
1646 lsb = c & -c;
1648 /* Invert to look for a second transition. */
1649 c = ~c;
1651 /* Erase first transition. */
1652 c &= -lsb;
1654 /* Find the second transition (if any). */
1655 lsb = c & -c;
1657 /* Match if all the bits above are 1's (or c is zero). */
1658 return c == -lsb;
1661 /* Return 1 for the PowerPC64 rlwinm corner case. */
1664 mask_operand_wrap (op, mode)
1665 rtx op;
1666 enum machine_mode mode ATTRIBUTE_UNUSED;
1668 HOST_WIDE_INT c, lsb;
1670 if (GET_CODE (op) != CONST_INT)
1671 return 0;
1673 c = INTVAL (op);
1675 if ((c & 0x80000001) != 0x80000001)
1676 return 0;
1678 c = ~c;
1679 if (c == 0)
1680 return 0;
1682 lsb = c & -c;
1683 c = ~c;
1684 c &= -lsb;
1685 lsb = c & -c;
1686 return c == -lsb;
1689 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1690 It is if there are no more than one 1->0 or 0->1 transitions.
1691 Reject all zeros, since zero should have been optimized away and
1692 confuses the making of MB and ME. */
1695 mask64_operand (op, mode)
1696 rtx op;
1697 enum machine_mode mode ATTRIBUTE_UNUSED;
1699 if (GET_CODE (op) == CONST_INT)
1701 HOST_WIDE_INT c, lsb;
1703 c = INTVAL (op);
1705 /* Reject all zeros. */
1706 if (c == 0)
1707 return 0;
1709 /* We don't change the number of transitions by inverting,
1710 so make sure we start with the LS bit zero. */
1711 if (c & 1)
1712 c = ~c;
1714 /* Find the transition, and check that all bits above are 1's. */
1715 lsb = c & -c;
1717 /* Match if all the bits above are 1's (or c is zero). */
1718 return c == -lsb;
1720 return 0;
1723 /* Like mask64_operand, but allow up to three transitions. This
1724 predicate is used by insn patterns that generate two rldicl or
1725 rldicr machine insns. */
1728 mask64_2_operand (op, mode)
1729 rtx op;
1730 enum machine_mode mode ATTRIBUTE_UNUSED;
1732 if (GET_CODE (op) == CONST_INT)
1734 HOST_WIDE_INT c, lsb;
1736 c = INTVAL (op);
1738 /* Disallow all zeros. */
1739 if (c == 0)
1740 return 0;
1742 /* We don't change the number of transitions by inverting,
1743 so make sure we start with the LS bit zero. */
1744 if (c & 1)
1745 c = ~c;
1747 /* Find the first transition. */
1748 lsb = c & -c;
1750 /* Invert to look for a second transition. */
1751 c = ~c;
1753 /* Erase first transition. */
1754 c &= -lsb;
1756 /* Find the second transition. */
1757 lsb = c & -c;
1759 /* Invert to look for a third transition. */
1760 c = ~c;
1762 /* Erase second transition. */
1763 c &= -lsb;
1765 /* Find the third transition (if any). */
1766 lsb = c & -c;
1768 /* Match if all the bits above are 1's (or c is zero). */
1769 return c == -lsb;
1771 return 0;
1774 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1775 implement ANDing by the mask IN. */
1776 void
1777 build_mask64_2_operands (in, out)
1778 rtx in;
1779 rtx *out;
1781 #if HOST_BITS_PER_WIDE_INT >= 64
1782 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1783 int shift;
1785 if (GET_CODE (in) != CONST_INT)
1786 abort ();
1788 c = INTVAL (in);
1789 if (c & 1)
1791 /* Assume c initially something like 0x00fff000000fffff. The idea
1792 is to rotate the word so that the middle ^^^^^^ group of zeros
1793 is at the MS end and can be cleared with an rldicl mask. We then
1794 rotate back and clear off the MS ^^ group of zeros with a
1795 second rldicl. */
1796 c = ~c; /* c == 0xff000ffffff00000 */
1797 lsb = c & -c; /* lsb == 0x0000000000100000 */
1798 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1799 c = ~c; /* c == 0x00fff000000fffff */
1800 c &= -lsb; /* c == 0x00fff00000000000 */
1801 lsb = c & -c; /* lsb == 0x0000100000000000 */
1802 c = ~c; /* c == 0xff000fffffffffff */
1803 c &= -lsb; /* c == 0xff00000000000000 */
1804 shift = 0;
1805 while ((lsb >>= 1) != 0)
1806 shift++; /* shift == 44 on exit from loop */
1807 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1808 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1809 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1811 else
1813 /* Assume c initially something like 0xff000f0000000000. The idea
1814 is to rotate the word so that the ^^^ middle group of zeros
1815 is at the LS end and can be cleared with an rldicr mask. We then
1816 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1817 a second rldicr. */
1818 lsb = c & -c; /* lsb == 0x0000010000000000 */
1819 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1820 c = ~c; /* c == 0x00fff0ffffffffff */
1821 c &= -lsb; /* c == 0x00fff00000000000 */
1822 lsb = c & -c; /* lsb == 0x0000100000000000 */
1823 c = ~c; /* c == 0xff000fffffffffff */
1824 c &= -lsb; /* c == 0xff00000000000000 */
1825 shift = 0;
1826 while ((lsb >>= 1) != 0)
1827 shift++; /* shift == 44 on exit from loop */
1828 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1829 m1 >>= shift; /* m1 == 0x0000000000000fff */
1830 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1833 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1834 masks will be all 1's. We are guaranteed more than one transition. */
1835 out[0] = GEN_INT (64 - shift);
1836 out[1] = GEN_INT (m1);
1837 out[2] = GEN_INT (shift);
1838 out[3] = GEN_INT (m2);
1839 #else
1840 (void)in;
1841 (void)out;
1842 abort ();
1843 #endif
1846 /* Return 1 if the operand is either a non-special register or a constant
1847 that can be used as the operand of a PowerPC64 logical AND insn. */
1850 and64_operand (op, mode)
1851 rtx op;
1852 enum machine_mode mode;
1854 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1855 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1857 return (logical_operand (op, mode) || mask64_operand (op, mode));
1860 /* Like the above, but also match constants that can be implemented
1861 with two rldicl or rldicr insns. */
1864 and64_2_operand (op, mode)
1865 rtx op;
1866 enum machine_mode mode;
1868 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1869 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1871 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1874 /* Return 1 if the operand is either a non-special register or a
1875 constant that can be used as the operand of an RS/6000 logical AND insn. */
1878 and_operand (op, mode)
1879 rtx op;
1880 enum machine_mode mode;
1882 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1883 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1885 return (logical_operand (op, mode) || mask_operand (op, mode));
1888 /* Return 1 if the operand is a general register or memory operand. */
1891 reg_or_mem_operand (op, mode)
1892 rtx op;
1893 enum machine_mode mode;
1895 return (gpc_reg_operand (op, mode)
1896 || memory_operand (op, mode)
1897 || volatile_mem_operand (op, mode));
1900 /* Return 1 if the operand is a general register or memory operand without
1901 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1902 instruction. */
1905 lwa_operand (op, mode)
1906 rtx op;
1907 enum machine_mode mode;
1909 rtx inner = op;
1911 if (reload_completed && GET_CODE (inner) == SUBREG)
1912 inner = SUBREG_REG (inner);
1914 return gpc_reg_operand (inner, mode)
1915 || (memory_operand (inner, mode)
1916 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1917 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1918 && (GET_CODE (XEXP (inner, 0)) != PLUS
1919 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1920 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1923 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1926 symbol_ref_operand (op, mode)
1927 rtx op;
1928 enum machine_mode mode;
1930 if (mode != VOIDmode && GET_MODE (op) != mode)
1931 return 0;
1933 return (GET_CODE (op) == SYMBOL_REF);
1936 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1937 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1940 call_operand (op, mode)
1941 rtx op;
1942 enum machine_mode mode;
1944 if (mode != VOIDmode && GET_MODE (op) != mode)
1945 return 0;
1947 return (GET_CODE (op) == SYMBOL_REF
1948 || (GET_CODE (op) == REG
1949 && (REGNO (op) == LINK_REGISTER_REGNUM
1950 || REGNO (op) == COUNT_REGISTER_REGNUM
1951 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1954 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1955 this file and the function is not weakly defined. */
1958 current_file_function_operand (op, mode)
1959 rtx op;
1960 enum machine_mode mode ATTRIBUTE_UNUSED;
1962 return (GET_CODE (op) == SYMBOL_REF
1963 && (SYMBOL_REF_FLAG (op)
1964 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1965 && ! DECL_WEAK (current_function_decl))));
1968 /* Return 1 if this operand is a valid input for a move insn. */
1971 input_operand (op, mode)
1972 rtx op;
1973 enum machine_mode mode;
1975 /* Memory is always valid. */
1976 if (memory_operand (op, mode))
1977 return 1;
1979 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1980 if (GET_CODE (op) == CONSTANT_P_RTX)
1981 return 1;
1983 /* For floating-point, easy constants are valid. */
1984 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1985 && CONSTANT_P (op)
1986 && easy_fp_constant (op, mode))
1987 return 1;
1989 /* Allow any integer constant. */
1990 if (GET_MODE_CLASS (mode) == MODE_INT
1991 && (GET_CODE (op) == CONST_INT
1992 || GET_CODE (op) == CONST_DOUBLE))
1993 return 1;
1995 /* For floating-point or multi-word mode, the only remaining valid type
1996 is a register. */
1997 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1998 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1999 return register_operand (op, mode);
2001 /* The only cases left are integral modes one word or smaller (we
2002 do not get called for MODE_CC values). These can be in any
2003 register. */
2004 if (register_operand (op, mode))
2005 return 1;
2007 /* A SYMBOL_REF referring to the TOC is valid. */
2008 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
2009 return 1;
2011 /* A constant pool expression (relative to the TOC) is valid */
2012 if (TOC_RELATIVE_EXPR_P (op))
2013 return 1;
2015 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2016 to be valid. */
2017 if (DEFAULT_ABI == ABI_V4
2018 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2019 && small_data_operand (op, Pmode))
2020 return 1;
2022 return 0;
2025 /* Return 1 for an operand in small memory on V.4/eabi. */
2028 small_data_operand (op, mode)
2029 rtx op ATTRIBUTE_UNUSED;
2030 enum machine_mode mode ATTRIBUTE_UNUSED;
2032 #if TARGET_ELF
2033 rtx sym_ref;
2035 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2036 return 0;
2038 if (DEFAULT_ABI != ABI_V4)
2039 return 0;
2041 if (GET_CODE (op) == SYMBOL_REF)
2042 sym_ref = op;
2044 else if (GET_CODE (op) != CONST
2045 || GET_CODE (XEXP (op, 0)) != PLUS
2046 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2047 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2048 return 0;
2050 else
2052 rtx sum = XEXP (op, 0);
2053 HOST_WIDE_INT summand;
2055 /* We have to be careful here, because it is the referenced address
2056 that must be 32k from _SDA_BASE_, not just the symbol. */
2057 summand = INTVAL (XEXP (sum, 1));
2058 if (summand < 0 || summand > g_switch_value)
2059 return 0;
2061 sym_ref = XEXP (sum, 0);
2064 if (*XSTR (sym_ref, 0) != '@')
2065 return 0;
2067 return 1;
2069 #else
2070 return 0;
2071 #endif
2074 static int
2075 constant_pool_expr_1 (op, have_sym, have_toc)
2076 rtx op;
2077 int *have_sym;
2078 int *have_toc;
2080 switch (GET_CODE(op))
2082 case SYMBOL_REF:
2083 if (CONSTANT_POOL_ADDRESS_P (op))
2085 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2087 *have_sym = 1;
2088 return 1;
2090 else
2091 return 0;
2093 else if (! strcmp (XSTR (op, 0), toc_label_name))
2095 *have_toc = 1;
2096 return 1;
2098 else
2099 return 0;
2100 case PLUS:
2101 case MINUS:
2102 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2103 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2104 case CONST:
2105 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2106 case CONST_INT:
2107 return 1;
2108 default:
2109 return 0;
2114 constant_pool_expr_p (op)
2115 rtx op;
2117 int have_sym = 0;
2118 int have_toc = 0;
2119 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2123 toc_relative_expr_p (op)
2124 rtx op;
2126 int have_sym = 0;
2127 int have_toc = 0;
2128 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + constant that does not fit in a signed 16-bit displacement:
     split it into a high part (added into a new register) and a
     sign-extended low 16-bit part left as the displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* low_int is the low 16 bits sign-extended; high_int is the
	 remainder, which is a multiple of 0x10000 and thus loadable
	 with addis/cau.  high_int + low_int == original constant.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: load the addend into a register and use an
     indexed (reg+reg) address.  Excluded are the multi-register and
     soft-float FP cases for which indexed addressing is not usable.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec addresses are reg or reg+reg only; no displacements.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Offsets that do not fit the SPE evldd/evstdd range must be
	     materialized in a register.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC: address a symbolic constant with a
     lis/addi (HIGH/LO_SUM) pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Same idea for Darwin (Mach-O) without a TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Constant-pool references that live in the TOC become TOC-relative
     addresses.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
/* Targets normally define REG_MODE_OK_FOR_BASE_P wherever it is used;
   since rs6000_legitimize_reload_address (which uses it) now lives in
   this file, provide a fallback definition here.  */
2256 #ifndef REG_MODE_OK_FOR_BASE_P
2257 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2258 #endif
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */

rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
     rtx x;
     enum machine_mode mode;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
     int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* (plus (plus reg high) low): reload the inner reg+high sum into
	 a base register and keep the low displacement in the address.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Base register plus an out-of-range constant: split the constant
     into a high part (reloaded into the base) and a signed 16-bit low
     part left in the mem.  Vector modes do not allow displacement
     addressing, so they are excluded.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
			    gen_rtx (MINUS, Pmode, x,
				     gen_rtx (SYMBOL_REF, Pmode,
					      machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
			    gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Constant-pool entries placed in the TOC: use a TOC reference
     directly, no reload needed.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */

int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, when the update forms are enabled and the
     mode has them (vector modes do not).  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small-data section references (sdata via r13/r2).  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* TOC-relative constant pool address.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit signed displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg), excluded for multi-register modes (see the
     function comment above).  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* LO_SUM produced by HIGH/LO_SUM address splitting.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */

rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Sub-word constants always fit in a single load-immediate.  */
      if (dest == NULL)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Two insns: load the high 16 bits (lis), then OR in the low 16
	 bits (ori).  Use a fresh pseudo for the intermediate when
	 allowed, so the two halves can be scheduled independently.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split the 64-bit value into low (c0) and high (c1) halves,
	 then let rs6000_emit_set_long_const build the sequence.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* Sign-extend into the high half.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note so later passes know the final value of
     the destination even though it was built piecemeal.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit: a DImode register is a pair of word-sized subwords;
	 move each 32-bit half separately.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit: decompose the value into four 16-bit chunks, ud1
	 (lowest) through ud4 (highest), and emit the shortest
	 li/lis/ori/oris/sldi sequence that rebuilds it.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit constant: one li.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}
      /* Value is a sign-extended 32-bit constant: lis, optionally ori.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Upper 48 bits are a sign-extended 32-bit value: build the top
	 three chunks, shift left 16, then OR in the lowest chunk.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* General case: build the high 32 bits, shift left 32, then
	     OR in the low two chunks.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
/* Emit a move from SOURCE to DEST in mode MODE.  Legitimizes the
   operands (forcing awkward constants to memory or the TOC, fixing up
   POWER single-precision stores, splitting slow unaligned DImode block
   moves) and then emits the SET.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If a DImode mem-to-mem copy is slow when unaligned but the two
     SImode halves are not, split it into two SImode moves.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* Narrow loads: load the full word zero-extended and take the
	 low part, so the load can use the plain word-load pattern.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* Stores need the source in a register (no mem-to-mem moves).  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!ALTIVEC_VECTOR_MODE (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants (e.g. symbolic) go to memory.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go to memory.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Vector constants likewise, unless loadable with vspltis*.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load the address through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC, no PIC: materialize symbolic constants with a
	 high/low (lis/la) pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      /* Otherwise, constants that are expensive to build inline (or
	 destined for an FP register) are placed in the constant pool.  */
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pool entry landed in the TOC, address it through a
	     TOC reference and mark the mem as constant.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode mems need a plain register address; copy anything more
	 complex into a register first (not during reload).  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      /* POWER's TImode move pattern clobbers a scratch register.  */
      if (TARGET_POWER)
	{
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (2,
						  gen_rtx_SET (VOIDmode,
							       operands[0], operands[1]),
						  gen_rtx_CLOBBER (VOIDmode,
								   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   For incoming args we set the number of arguments in the prototype large
   so we never return a PARALLEL.  */

void
init_cumulative_args (cum, fntype, libname, incoming)
     CUMULATIVE_ARGS *cum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int incoming;
{
  static CUMULATIVE_ARGS zero_cumulative;

  /* Start from an all-zero state, then set the first available
     register of each class (GPR, FPR, AltiVec).  */
  *cum = zero_cumulative;
  cum->words = 0;
  cum->fregno = FP_ARG_MIN_REG;
  cum->vregno = ALTIVEC_ARG_MIN_REG;
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
  cum->call_cookie = CALL_NORMAL;
  cum->sysv_gregno = GP_ARG_MIN_REG;

  if (incoming)
    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */

  else if (cum->prototype)
    /* Count the prototype's arguments; the trailing void slot is
       excluded (-1) and a hidden return-pointer argument is added
       when the function returns in memory.  */
    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));

  else
    cum->nargs_prototype = 0;

  cum->orig_nargs = cum->nargs_prototype;

  /* Check for a longcall attribute.  */
  if (fntype
      && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
      && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie = CALL_LONG;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args:");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " ret code = %s,",
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}

      if (cum->call_cookie & CALL_LONG)
	fprintf (stderr, " longcall,");

      fprintf (stderr, " proto = %d, nargs = %d\n",
	       cum->prototype, cum->nargs_prototype);
    }
}
2983 /* If defined, a C expression which determines whether, and in which
2984 direction, to pad out an argument with extra space. The value
2985 should be of type `enum direction': either `upward' to pad above
2986 the argument, `downward' to pad below, or `none' to inhibit
2987 padding.
2989 For the AIX ABI structs are always stored left shifted in their
2990 argument slot. */
2992 enum direction
2993 function_arg_padding (mode, type)
2994 enum machine_mode mode;
2995 tree type;
2997 if (type != 0 && AGGREGATE_TYPE_P (type))
2998 return upward;
3000 /* This is the default definition. */
3001 return (! BYTES_BIG_ENDIAN
3002 ? upward
3003 : ((mode == BLKmode
3004 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3005 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3006 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3007 ? downward : upward));
3010 /* If defined, a C expression that gives the alignment boundary, in bits,
3011 of an argument with the specified mode and type. If it is not defined,
3012 PARM_BOUNDARY is used for all arguments.
3014 V.4 wants long longs to be double word aligned. */
3017 function_arg_boundary (mode, type)
3018 enum machine_mode mode;
3019 tree type ATTRIBUTE_UNUSED;
3021 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3022 return 64;
3023 else if (SPE_VECTOR_MODE (mode))
3024 return 64;
3025 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3026 return 128;
3027 else
3028 return PARM_BOUNDARY;
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  /* AltiVec vector arguments consume a vector register while any
     remain; afterwards they are passed in memory.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  /* Named SPE vectors consume a single GPR slot.  */
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      /* V.4 hard-float: SF/DF args take an FP register while any
	 remain; otherwise go on the stack (doubles aligned).  */
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      if (mode == DFmode)
		cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX / Darwin: args occupy consecutive word slots; 64-bit args
	 on 32-bit targets are aligned to an even word first.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP args also consume FP registers (TFmode takes two).  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3132 /* Determine where to put an argument to a function.
3133 Value is zero to push the argument on the stack,
3134 or a hard register in which to store the argument.
3136 MODE is the argument's machine mode.
3137 TYPE is the data type of the argument (as a tree).
3138 This is null for libcalls where that information may
3139 not be available.
3140 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3141 the preceding args and about the function being called.
3142 NAMED is nonzero if this argument is a named parameter
3143 (otherwise it is an extra parameter matching an ellipsis).
3145 On RS/6000 the first eight words of non-FP are normally in registers
3146 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3147 Under V.4, the first 8 FP args are in registers.
3149 If this is floating-point and no prototype is specified, we use
3150 both an FP and integer register (or possibly FP reg and stack). Library
3151 functions (when TYPE is zero) always have the proper types for args,
3152 so we can pass the FP value just in one register. emit_library_function
3153 doesn't support PARALLEL anyway. */
3155 struct rtx_def *
3156 function_arg (cum, mode, type, named)
3157 CUMULATIVE_ARGS *cum;
3158 enum machine_mode mode;
3159 tree type;
3160 int named;
3162 enum rs6000_abi abi = DEFAULT_ABI;
3164 /* Return a marker to indicate whether CR1 needs to set or clear the
3165 bit that V.4 uses to say fp args were passed in registers.
3166 Assume that we don't need the marker for software floating point,
3167 or compiler generated library calls. */
3168 if (mode == VOIDmode)
3170 if (abi == ABI_V4
3171 && cum->nargs_prototype < 0
3172 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3174 /* For the SPE, we need to crxor CR6 always. */
3175 if (TARGET_SPE_ABI)
3176 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3177 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3178 return GEN_INT (cum->call_cookie
3179 | ((cum->fregno == FP_ARG_MIN_REG)
3180 ? CALL_V4_SET_FP_ARGS
3181 : CALL_V4_CLEAR_FP_ARGS));
3184 return GEN_INT (cum->call_cookie);
3187 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3189 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3190 return gen_rtx_REG (mode, cum->vregno);
3191 else
3192 return NULL;
3194 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3196 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3197 return gen_rtx_REG (mode, cum->sysv_gregno);
3198 else
3199 return NULL;
3201 else if (abi == ABI_V4)
3203 if (TARGET_HARD_FLOAT && TARGET_FPRS
3204 && (mode == SFmode || mode == DFmode))
3206 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3207 return gen_rtx_REG (mode, cum->fregno);
3208 else
3209 return NULL;
3211 else
3213 int n_words;
3214 int gregno = cum->sysv_gregno;
3216 /* Aggregates and IEEE quad get passed by reference. */
3217 if ((type && AGGREGATE_TYPE_P (type))
3218 || mode == TFmode)
3219 n_words = 1;
3220 else
3221 n_words = RS6000_ARG_SIZE (mode, type);
3223 /* Long long and SPE vectors are put in odd registers. */
3224 if (n_words == 2 && (gregno & 1) == 0)
3225 gregno += 1;
3227 /* Long long and SPE vectors are not split between registers
3228 and stack. */
3229 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3231 /* SPE vectors in ... get split into 2 registers. */
3232 if (TARGET_SPE && TARGET_SPE_ABI
3233 && SPE_VECTOR_MODE (mode) && !named)
3235 rtx r1, r2;
3236 enum machine_mode m = SImode;
3238 r1 = gen_rtx_REG (m, gregno);
3239 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3240 r2 = gen_rtx_REG (m, gregno + 1);
3241 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3242 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3244 return gen_rtx_REG (mode, gregno);
3246 else
3247 return NULL;
3250 else
3252 int align = (TARGET_32BIT && (cum->words & 1) != 0
3253 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3254 int align_words = cum->words + align;
3256 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3257 return NULL_RTX;
3259 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3261 if (! type
3262 || ((cum->nargs_prototype > 0)
3263 /* IBM AIX extended its linkage convention definition always
3264 to require FP args after register save area hole on the
3265 stack. */
3266 && (DEFAULT_ABI != ABI_AIX
3267 || ! TARGET_XL_CALL
3268 || (align_words < GP_ARG_NUM_REG))))
3269 return gen_rtx_REG (mode, cum->fregno);
3271 return gen_rtx_PARALLEL (mode,
3272 gen_rtvec (2,
3273 gen_rtx_EXPR_LIST (VOIDmode,
3274 ((align_words >= GP_ARG_NUM_REG)
3275 ? NULL_RTX
3276 : (align_words
3277 + RS6000_ARG_SIZE (mode, type)
3278 > GP_ARG_NUM_REG
3279 /* If this is partially on the stack, then
3280 we only include the portion actually
3281 in registers here. */
3282 ? gen_rtx_REG (SImode,
3283 GP_ARG_MIN_REG + align_words)
3284 : gen_rtx_REG (mode,
3285 GP_ARG_MIN_REG + align_words))),
3286 const0_rtx),
3287 gen_rtx_EXPR_LIST (VOIDmode,
3288 gen_rtx_REG (mode, cum->fregno),
3289 const0_rtx)));
3291 else if (align_words < GP_ARG_NUM_REG)
3292 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3293 else
3294 return NULL_RTX;
3298 /* For an arg passed partly in registers and partly in memory,
3299 this is the number of registers used.
3300 For args passed entirely in registers or entirely in memory, zero. */
3303 function_arg_partial_nregs (cum, mode, type, named)
3304 CUMULATIVE_ARGS *cum;
3305 enum machine_mode mode;
3306 tree type;
3307 int named ATTRIBUTE_UNUSED;
/* V.4 never splits an argument between registers and the stack.  */
3309 if (DEFAULT_ABI == ABI_V4)
3310 return 0;
/* Prototyped FP/AltiVec candidates go wholly in one place; the
   unprototyped case (nargs_prototype < 0) falls through, since
   function_arg then emits a PARALLEL covering FPR plus GPRs/stack.  */
3312 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3313 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3315 if (cum->nargs_prototype >= 0)
3316 return 0;
/* Straddling the last GPR: the part in registers is what's left of
   the GPR file.  */
3319 if (cum->words < GP_ARG_NUM_REG
3320 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3322 int ret = GP_ARG_NUM_REG - cum->words;
3323 if (ret && TARGET_DEBUG_ARG)
3324 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3326 return ret;
3329 return 0;
3332 /* A C expression that indicates when an argument must be passed by
3333 reference. If nonzero for an argument, a copy of that argument is
3334 made in memory and a pointer to the argument is passed instead of
3335 the argument itself. The pointer is passed in whatever way is
3336 appropriate for passing a pointer to that type.
3338 Under V.4, structures and unions are passed by reference.
3340 As an extension to all ABIs, variable sized types are passed by
3341 reference. */
3344 function_arg_pass_by_reference (cum, mode, type, named)
3345 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3346 enum machine_mode mode ATTRIBUTE_UNUSED;
3347 tree type;
3348 int named ATTRIBUTE_UNUSED;
/* V.4: aggregates and IEEE quad (TFmode) are passed by reference.  */
3350 if (DEFAULT_ABI == ABI_V4
3351 && ((type && AGGREGATE_TYPE_P (type))
3352 || mode == TFmode))
3354 if (TARGET_DEBUG_ARG)
3355 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3357 return 1;
/* Variable-sized types report a nonpositive size; they are always
   passed by reference.  */
3359 return type && int_size_in_bytes (type) <= 0;
3362 /* Perform any needed actions needed for a function that is receiving a
3363 variable number of arguments.
3365 CUM is as above.
3367 MODE and TYPE are the mode and type of the current parameter.
3369 PRETEND_SIZE is a variable that should be set to the amount of stack
3370 that must be pushed by the prolog to pretend that our caller pushed
3373 Normally, this macro will push all remaining incoming registers on the
3374 stack and set PRETEND_SIZE to the length of the registers pushed. */
3376 void
3377 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3378 CUMULATIVE_ARGS *cum;
3379 enum machine_mode mode;
3380 tree type;
3381 int *pretend_size ATTRIBUTE_UNUSED;
3382 int no_rtl;
3385 CUMULATIVE_ARGS next_cum;
3386 int reg_size = TARGET_32BIT ? 4 : 8;
3387 rtx save_area = NULL_RTX, mem;
3388 int first_reg_offset, set;
3389 tree fntype;
3390 int stdarg_p;
3392 fntype = TREE_TYPE (current_function_decl);
/* stdarg_p: the arg list is prototyped and does not end in void,
   i.e. an ISO stdarg function (as opposed to K&R varargs).  */
3393 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3394 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3395 != void_type_node));
3397 /* For varargs, we do not want to skip the dummy va_dcl argument.
3398 For stdargs, we do want to skip the last named argument. */
3399 next_cum = *cum;
3400 if (stdarg_p)
3401 function_arg_advance (&next_cum, mode, type, 1);
3403 if (DEFAULT_ABI == ABI_V4)
3405 /* Indicate to allocate space on the stack for varargs save area. */
3406 cfun->machine->sysv_varargs_p = 1;
3407 if (! no_rtl)
3408 save_area = plus_constant (virtual_stack_vars_rtx,
3409 - RS6000_VARARGS_SIZE)
3411 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3413 else
/* AIX/Darwin: registers are saved into the caller-allocated
   parameter area at the incoming-args pointer.  */
3415 first_reg_offset = next_cum.words;
3416 save_area = virtual_incoming_args_rtx;
3417 cfun->machine->sysv_varargs_p = 0;
3419 if (MUST_PASS_IN_STACK (mode, type))
3420 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3423 set = get_varargs_alias_set ();
/* Dump the remaining incoming GPRs to the save area.  NOTE(review):
   line 3428 ends with a comma operator where a semicolon was
   presumably intended; behavior is the same either way.  */
3424 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3426 mem = gen_rtx_MEM (BLKmode,
3427 plus_constant (save_area,
3428 first_reg_offset * reg_size)),
3429 set_mem_alias_set (mem, set);
3430 set_mem_align (mem, BITS_PER_WORD);
3432 move_block_from_reg
3433 (GP_ARG_MIN_REG + first_reg_offset, mem,
3434 GP_ARG_NUM_REG - first_reg_offset,
3435 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3438 /* Save FP registers if needed. */
3439 if (DEFAULT_ABI == ABI_V4
3440 && TARGET_HARD_FLOAT && TARGET_FPRS
3441 && ! no_rtl
3442 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3444 int fregno = next_cum.fregno;
3445 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3446 rtx lab = gen_label_rtx ();
/* FPR slots follow the 8 GPR slots in the varargs save area.  */
3447 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
/* Branch around the FPR stores when CR1 is nonzero — presumably the
   caller-side CALL_V4_SET/CLEAR_FP_ARGS cookie (see function_arg)
   determines CR1; confirm against the call expanders.  */
3449 emit_jump_insn (gen_rtx_SET (VOIDmode,
3450 pc_rtx,
3451 gen_rtx_IF_THEN_ELSE (VOIDmode,
3452 gen_rtx_NE (VOIDmode, cr1,
3453 const0_rtx),
3454 gen_rtx_LABEL_REF (VOIDmode, lab),
3455 pc_rtx)));
3457 while (fregno <= FP_ARG_V4_MAX_REG)
3459 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3460 set_mem_alias_set (mem, set);
3461 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3462 fregno++;
3463 off += 8;
3466 emit_label (lab);
3470 /* Create the va_list data type.
   V.4 uses a one-element array of a four-field record (gpr/fpr
   counters plus overflow and register-save-area pointers); every
   other ABI just uses 'char *'.  */
3472 tree
3473 rs6000_build_va_list ()
3475 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3477 /* For AIX, prefer 'char *' because that's what the system
3478 header files like. */
3479 if (DEFAULT_ABI != ABI_V4)
3480 return build_pointer_type (char_type_node);
3482 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3483 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* gpr/fpr are small counters of registers consumed, hence
   unsigned char; the other two fields are plain pointers.  */
3485 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3486 unsigned_char_type_node);
3487 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3488 unsigned_char_type_node);
3489 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3490 ptr_type_node);
3491 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3492 ptr_type_node);
3494 DECL_FIELD_CONTEXT (f_gpr) = record;
3495 DECL_FIELD_CONTEXT (f_fpr) = record;
3496 DECL_FIELD_CONTEXT (f_ovf) = record;
3497 DECL_FIELD_CONTEXT (f_sav) = record;
3499 TREE_CHAIN (record) = type_decl;
3500 TYPE_NAME (record) = type_decl;
3501 TYPE_FIELDS (record) = f_gpr;
3502 TREE_CHAIN (f_gpr) = f_fpr;
3503 TREE_CHAIN (f_fpr) = f_ovf;
3504 TREE_CHAIN (f_ovf) = f_sav;
3506 layout_type (record);
3508 /* The correct type is an array type of one element. */
3509 return build_array_type (record, build_index_type (size_zero_node));
3512 /* Implement va_start.  For V.4 this fills in the four va_list
   fields built by rs6000_build_va_list; other ABIs defer to the
   generic pointer-bump implementation.  */
3514 void
3515 rs6000_va_start (valist, nextarg)
3516 tree valist;
3517 rtx nextarg;
3519 HOST_WIDE_INT words, n_gpr, n_fpr;
3520 tree f_gpr, f_fpr, f_ovf, f_sav;
3521 tree gpr, fpr, ovf, sav, t;
3523 /* Only SVR4 needs something special. */
3524 if (DEFAULT_ABI != ABI_V4)
3526 std_expand_builtin_va_start (valist, nextarg);
3527 return;
/* Walk the record's field chain in layout order: gpr, fpr,
   overflow_arg_area, reg_save_area.  */
3530 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3531 f_fpr = TREE_CHAIN (f_gpr);
3532 f_ovf = TREE_CHAIN (f_fpr);
3533 f_sav = TREE_CHAIN (f_ovf);
3535 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3536 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3537 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3538 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3539 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3541 /* Count number of gp and fp argument registers used. */
3542 words = current_function_args_info.words;
3543 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3544 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3546 if (TARGET_DEBUG_ARG)
3548 fputs ("va_start: words = ", stderr);
3549 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3550 fputs (", n_gpr = ", stderr);
3551 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3552 fputs (", n_fpr = ", stderr);
3553 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3554 putc ('\n', stderr);
/* Initialize the two register counters.  */
3557 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3558 TREE_SIDE_EFFECTS (t) = 1;
3559 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3561 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3562 TREE_SIDE_EFFECTS (t) = 1;
3563 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3565 /* Find the overflow area. */
3566 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3567 if (words != 0)
3568 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3569 build_int_2 (words * UNITS_PER_WORD, 0));
3570 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3571 TREE_SIDE_EFFECTS (t) = 1;
3572 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3574 /* Find the register save area. */
3575 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3576 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3577 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3578 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3579 TREE_SIDE_EFFECTS (t) = 1;
3580 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3583 /* Implement va_arg.  Emits RTL that fetches the next argument of
   TYPE from VALIST and returns an rtx holding its address.  For
   non-V.4 ABIs only variable-sized (by-reference) types need custom
   handling; V.4 chooses between the register save area and the
   overflow (stack) area at run time.  */
3586 rs6000_va_arg (valist, type)
3587 tree valist, type;
3589 tree f_gpr, f_fpr, f_ovf, f_sav;
3590 tree gpr, fpr, ovf, sav, reg, t, u;
3591 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3592 rtx lab_false, lab_over, addr_rtx, r;
3594 if (DEFAULT_ABI != ABI_V4)
3596 /* Variable sized types are passed by reference. */
3597 if (int_size_in_bytes (type) <= 0)
3599 u = build_pointer_type (type);
3601 /* Args grow upward. */
3602 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
3603 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
3604 TREE_SIDE_EFFECTS (t) = 1;
/* Reinterpret the fetched word as a pointer to TYPE and
   dereference it to reach the actual argument.  */
3606 t = build1 (NOP_EXPR, build_pointer_type (u), t);
3607 TREE_SIDE_EFFECTS (t) = 1;
3609 t = build1 (INDIRECT_REF, u, t);
3610 TREE_SIDE_EFFECTS (t) = 1;
3612 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3614 else
3615 return std_expand_builtin_va_arg (valist, type);
3618 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3619 f_fpr = TREE_CHAIN (f_gpr);
3620 f_ovf = TREE_CHAIN (f_fpr);
3621 f_sav = TREE_CHAIN (f_ovf);
3623 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3624 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3625 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3626 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3627 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3629 size = int_size_in_bytes (type);
3630 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Classify: which counter (reg), how many registers the argument
   consumes (n_reg), and the offset/stride of its slots within the
   register save area (sav_ofs/sav_scale).  */
3632 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3634 /* Aggregates and long doubles are passed by reference. */
3635 indirect_p = 1;
3636 reg = gpr;
3637 n_reg = 1;
3638 sav_ofs = 0;
3639 sav_scale = 4;
3640 size = UNITS_PER_WORD;
3641 rsize = 1;
3643 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3645 /* FP args go in FP registers, if present. */
3646 indirect_p = 0;
3647 reg = fpr;
3648 n_reg = 1;
/* FPR slots start after the eight 4-byte GPR slots.  */
3649 sav_ofs = 8*4;
3650 sav_scale = 8;
3652 else
3654 /* Otherwise into GP registers. */
3655 indirect_p = 0;
3656 reg = gpr;
3657 n_reg = rsize;
3658 sav_ofs = 0;
3659 sav_scale = 4;
3662 /* Pull the value out of the saved registers ... */
3664 lab_false = gen_label_rtx ();
3665 lab_over = gen_label_rtx ();
3666 addr_rtx = gen_reg_rtx (Pmode);
3668 /* AltiVec vectors never go in registers. */
3669 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3671 TREE_THIS_VOLATILE (reg) = 1;
/* Registers are exhausted when the counter exceeds 8 - n_reg;
   jump to the overflow-area path in that case.  */
3672 emit_cmp_and_jump_insns
3673 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3674 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3675 lab_false);
3677 /* Long long is aligned in the registers. */
3678 if (n_reg > 1)
3680 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3681 build_int_2 (n_reg - 1, 0));
3682 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3683 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3684 TREE_SIDE_EFFECTS (u) = 1;
3685 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3688 if (sav_ofs)
3689 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3690 else
3691 t = sav;
/* addr = sav + sav_ofs + (reg++ * sav_scale).  */
3693 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3694 build_int_2 (n_reg, 0));
3695 TREE_SIDE_EFFECTS (u) = 1;
3697 u = build1 (CONVERT_EXPR, integer_type_node, u);
3698 TREE_SIDE_EFFECTS (u) = 1;
3700 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3701 TREE_SIDE_EFFECTS (u) = 1;
3703 t = build (PLUS_EXPR, ptr_type_node, t, u);
3704 TREE_SIDE_EFFECTS (t) = 1;
3706 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3707 if (r != addr_rtx)
3708 emit_move_insn (addr_rtx, r);
3710 emit_jump_insn (gen_jump (lab_over));
3711 emit_barrier ();
3714 emit_label (lab_false);
3716 /* ... otherwise out of the overflow area. */
3718 /* Make sure we don't find reg 7 for the next int arg.
3720 All AltiVec vectors go in the overflow area. So in the AltiVec
3721 case we need to get the vectors from the overflow area, but
3722 remember where the GPRs and FPRs are. */
3723 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3724 || !TARGET_ALTIVEC))
/* Mark the register file exhausted (counter = 8).  */
3726 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3727 TREE_SIDE_EFFECTS (t) = 1;
3728 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3731 /* Care for on-stack alignment if needed. */
3732 if (rsize <= 1)
3733 t = ovf;
3734 else
3736 int align;
3738 /* AltiVec vectors are 16 byte aligned. */
3739 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3740 align = 15;
3741 else
3742 align = 7;
/* Round ovf up: t = (ovf + align) & ~align.  */
3744 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3745 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3747 t = save_expr (t);
3749 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3750 if (r != addr_rtx)
3751 emit_move_insn (addr_rtx, r);
/* Advance ovf past the argument just fetched.  */
3753 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3754 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3755 TREE_SIDE_EFFECTS (t) = 1;
3756 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3758 emit_label (lab_over);
/* By-reference argument: the slot holds a pointer; load through it.  */
3760 if (indirect_p)
3762 r = gen_rtx_MEM (Pmode, addr_rtx);
3763 set_mem_alias_set (r, get_varargs_alias_set ());
3764 emit_move_insn (addr_rtx, r);
3767 return addr_rtx;
3770 /* Builtins. */
/* Register builtin NAME with function type TYPE and target-specific
   code CODE, but only when the current target_flags contain MASK
   (so e.g. AltiVec builtins appear only with -maltivec).  */
3772 #define def_builtin(MASK, NAME, TYPE, CODE) \
3773 do { \
3774 if ((MASK) & target_flags) \
3775 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3776 NULL, NULL_TREE); \
3777 } while (0)
3779 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc).
   Each entry: target mask, insn code, builtin name, builtin enum.  */
3781 static const struct builtin_description bdesc_3arg[] =
3783 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3784 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3785 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3786 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3787 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3788 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3789 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3790 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3791 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3792 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3793 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3794 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3795 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3796 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3797 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3798 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3799 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3800 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3801 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3802 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3803 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3804 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3805 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3808 /* DST operations: void foo (void *, const int, const char).
   AltiVec data-stream touch builtins; same entry layout as the
   other bdesc tables (mask, insn code, name, builtin enum).  */
3810 static const struct builtin_description bdesc_dst[] =
3812 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3813 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3814 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3815 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3818 /* Simple binary operations: VECc = foo (VECa, VECb). */
3820 static struct builtin_description bdesc_2arg[] =
3822 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3823 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3824 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3825 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3826 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3827 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3828 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3829 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3830 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3831 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3832 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3833 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3834 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3835 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3836 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3837 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3838 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3839 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3840 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3841 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3842 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3843 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3844 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3845 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3846 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3847 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3848 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3849 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3850 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3851 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3852 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3853 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3854 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3855 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3856 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3857 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3858 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3859 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3860 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3861 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3862 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3863 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3864 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3865 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3866 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3867 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3868 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3869 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3870 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3871 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3872 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3873 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3874 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3875 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3876 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3877 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3878 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3879 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3880 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3881 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3882 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3883 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3884 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3885 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3886 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3887 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3888 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3889 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3890 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3891 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3892 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3893 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3894 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3895 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3896 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3897 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3898 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3899 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3900 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3901 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3902 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3903 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3904 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3905 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3906 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3907 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3908 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3909 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3910 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3911 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3912 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3913 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3914 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3915 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3916 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3917 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3918 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3919 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3920 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3921 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3922 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3923 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3924 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3925 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3926 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3927 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3928 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3929 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3930 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3931 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3932 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3933 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3934 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3936 /* Place holder, leave as first spe builtin. */
3937 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3938 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3939 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3940 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3941 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3942 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3943 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3944 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3945 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3946 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3947 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3948 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3949 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3950 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3951 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3952 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3953 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3954 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3955 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3956 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3957 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3958 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3959 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3960 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3961 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3962 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3963 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3964 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3965 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3966 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3967 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3968 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3969 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3970 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3971 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3972 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3973 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3974 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3975 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3976 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3977 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3978 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3979 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3980 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3981 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3982 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3983 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3984 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3985 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3986 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3987 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3988 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3989 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3990 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3991 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3992 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3993 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3994 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3995 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3996 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3997 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3998 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3999 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4000 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4001 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4002 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4003 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4004 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4005 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4006 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4007 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4008 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4009 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4010 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4011 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4012 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4013 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4014 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4015 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4016 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4017 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4018 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4019 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4020 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4021 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4022 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4023 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4024 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4025 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4026 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4027 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4028 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4029 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4030 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4031 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4032 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4033 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4034 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4035 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4036 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4037 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4038 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4039 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4040 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4041 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4042 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4043 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4044 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4045 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4047 /* SPE binary operations expecting a 5-bit unsigned literal. */
4048 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4050 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4051 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4052 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4053 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4054 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4055 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4056 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4057 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4058 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4059 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4060 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4061 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4062 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4063 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4064 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4065 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4066 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4067 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4068 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4069 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4070 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4071 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4072 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4073 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4074 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4075 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4077 /* Place-holder. Leave as last binary SPE builtin. */
4078 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4081 /* AltiVec predicates. */
4083 struct builtin_description_predicates  /* One AltiVec predicate builtin.  */
4085 const unsigned int mask;  /* Target feature mask (MASK_ALTIVEC) gating availability.  */
4086 const enum insn_code icode;  /* Insn used to expand the comparison.  */
4087 const char *opcode;  /* Assembler opcode string; handed to the insn as a SYMBOL_REF
			     by altivec_expand_predicate_builtin.  */
4088 const char *const name;  /* User-visible __builtin_* name.  */
4089 const enum rs6000_builtins code;  /* Builtin function code.  */
/* AltiVec predicate builtins.  Each entry: feature mask, expander
   icode, assembler opcode string (passed through as a SYMBOL_REF by
   altivec_expand_predicate_builtin), builtin name, builtin code.  */
4092 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4094 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4095 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4096 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4097 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4098 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4099 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4100 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4101 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4102 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4103 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4104 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4105 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4106 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4109 /* SPE predicates. */
/* The first and last entries delimit the range of SPE predicate
   builtin codes, so their positions are significant (see the
   place-holder comments below).  */
4110 static struct builtin_description bdesc_spe_predicates[] =
4112 /* Place-holder. Leave as first. */
4113 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4114 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4115 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4116 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4117 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4118 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4119 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4120 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4121 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4122 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4123 /* Place-holder. Leave as last. */
4124 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4127 /* SPE evsel predicates. */
/* Comparison builtins combined with evsel.  First and last entries
   delimit the builtin-code range; keep them in place.  */
4128 static struct builtin_description bdesc_spe_evsel[] =
4130 /* Place-holder. Leave as first. */
4131 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4132 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4133 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4134 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4135 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4136 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4137 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4138 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4139 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4140 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4141 /* Place-holder. Leave as last. */
4142 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4145 /* ABS* operations. */
/* Expanded by altivec_expand_abs_builtin, which supplies the two
   scratch registers these insns require.  */
4147 static const struct builtin_description bdesc_abs[] =
4149 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4150 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4151 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4152 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4153 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4154 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4155 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4158 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4159 foo (VECa). */
/* Expanded by rs6000_expand_unop_builtin; the vspltis* and evsplat*
   entries take a 5-bit signed literal instead of a vector operand
   (that function enforces the range).  */
4161 static struct builtin_description bdesc_1arg[] =
4163 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4164 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4165 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4166 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4167 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4168 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4169 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4170 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4171 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4172 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4173 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4174 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4175 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4176 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4177 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4178 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4179 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4181 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4182 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4183 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4184 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4185 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4186 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4187 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4188 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4189 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4190 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4191 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4192 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4193 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4194 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4195 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4196 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4197 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4198 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4199 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4200 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4201 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4202 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4203 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4204 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4205 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4206 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4207 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4208 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4209 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4210 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4211 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4212 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4214 /* Place-holder. Leave as last unary SPE builtin. */
4215 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
/* Expand a one-operand builtin: TARGET = insn ICODE applied to the single
   argument in ARGLIST.  Returns the result rtx, 0 if the builtin is not
   supported on this processor or the insn pattern fails to generate, or
   const0_rtx after diagnosing an invalid argument.  */
4218 static rtx
4219 rs6000_expand_unop_builtin (icode, arglist, target)
4220 enum insn_code icode;
4221 tree arglist;
4222 rtx target;
4224 rtx pat;
4225 tree arg0 = TREE_VALUE (arglist);
4226 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4227 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4228 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4230 if (icode == CODE_FOR_nothing)
4231 /* Builtin not supported on this processor. */
4232 return 0;
4234 /* If we got invalid arguments bail out before generating bad rtl. */
4235 if (arg0 == error_mark_node)
4236 return const0_rtx;
/* These insns take an immediate splat value rather than a vector
   operand; range-check it here.  */
4238 if (icode == CODE_FOR_altivec_vspltisb
4239 || icode == CODE_FOR_altivec_vspltish
4240 || icode == CODE_FOR_altivec_vspltisw
4241 || icode == CODE_FOR_spe_evsplatfi
4242 || icode == CODE_FOR_spe_evsplati)
4244 /* Only allow 5-bit *signed* literals. */
/* NOTE(review): this accepts -0x1f..0x1f, which is wider than the true
   5-bit two's-complement range -16..15 — confirm whether the insn
   predicates catch the excess values.  */
4245 if (GET_CODE (op0) != CONST_INT
4246 || INTVAL (op0) > 0x1f
4247 || INTVAL (op0) < -0x1f)
4249 error ("argument 1 must be a 5-bit signed literal");
4250 return const0_rtx;
/* Reuse TARGET only if it has the right mode and satisfies the insn's
   output predicate; otherwise allocate a fresh pseudo.  */
4254 if (target == 0
4255 || GET_MODE (target) != tmode
4256 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4257 target = gen_reg_rtx (tmode);
4259 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4260 op0 = copy_to_mode_reg (mode0, op0);
4262 pat = GEN_FCN (icode) (target, op0);
4263 if (! pat)
4264 return 0;
4265 emit_insn (pat);
4267 return target;
/* Expand an AltiVec absolute-value builtin: TARGET = abs of the single
   argument in ARGLIST.  The abs/abss insn patterns require two scratch
   registers in MODE0, which are allocated here and passed as extra
   operands.  Returns the result rtx, 0 on pattern failure, or const0_rtx
   for invalid arguments.
   NOTE(review): unlike rs6000_expand_unop_builtin there is no
   CODE_FOR_nothing guard — presumably ICODE is always valid for the
   bdesc_abs entries; confirm.  */
4270 static rtx
4271 altivec_expand_abs_builtin (icode, arglist, target)
4272 enum insn_code icode;
4273 tree arglist;
4274 rtx target;
4276 rtx pat, scratch1, scratch2;
4277 tree arg0 = TREE_VALUE (arglist);
4278 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4279 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4280 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4282 /* If we have invalid arguments, bail out before generating bad rtl. */
4283 if (arg0 == error_mark_node)
4284 return const0_rtx;
4286 if (target == 0
4287 || GET_MODE (target) != tmode
4288 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4289 target = gen_reg_rtx (tmode);
4291 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4292 op0 = copy_to_mode_reg (mode0, op0);
/* The abs patterns need two temporaries in the input mode.  */
4294 scratch1 = gen_reg_rtx (mode0);
4295 scratch2 = gen_reg_rtx (mode0);
4297 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4298 if (! pat)
4299 return 0;
4300 emit_insn (pat);
4302 return target;
/* Expand a two-operand builtin: TARGET = insn ICODE applied to the two
   arguments in ARGLIST.  Returns the result rtx, 0 if the builtin is
   unsupported or the pattern fails, or const0_rtx after diagnosing a
   bad argument.  */
4305 static rtx
4306 rs6000_expand_binop_builtin (icode, arglist, target)
4307 enum insn_code icode;
4308 tree arglist;
4309 rtx target;
4311 rtx pat;
4312 tree arg0 = TREE_VALUE (arglist);
4313 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4314 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4315 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4316 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4317 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4318 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4320 if (icode == CODE_FOR_nothing)
4321 /* Builtin not supported on this processor. */
4322 return 0;
4324 /* If we got invalid arguments bail out before generating bad rtl. */
4325 if (arg0 == error_mark_node || arg1 == error_mark_node)
4326 return const0_rtx;
/* These insns require their second operand to be a 5-bit unsigned
   immediate; diagnose anything else before emitting rtl.  */
4328 if (icode == CODE_FOR_altivec_vcfux
4329 || icode == CODE_FOR_altivec_vcfsx
4330 || icode == CODE_FOR_altivec_vctsxs
4331 || icode == CODE_FOR_altivec_vctuxs
4332 || icode == CODE_FOR_altivec_vspltb
4333 || icode == CODE_FOR_altivec_vsplth
4334 || icode == CODE_FOR_altivec_vspltw
4335 || icode == CODE_FOR_spe_evaddiw
4336 || icode == CODE_FOR_spe_evldd
4337 || icode == CODE_FOR_spe_evldh
4338 || icode == CODE_FOR_spe_evldw
4339 || icode == CODE_FOR_spe_evlhhesplat
4340 || icode == CODE_FOR_spe_evlhhossplat
4341 || icode == CODE_FOR_spe_evlhhousplat
4342 || icode == CODE_FOR_spe_evlwhe
4343 || icode == CODE_FOR_spe_evlwhos
4344 || icode == CODE_FOR_spe_evlwhou
4345 || icode == CODE_FOR_spe_evlwhsplat
4346 || icode == CODE_FOR_spe_evlwwsplat
4347 || icode == CODE_FOR_spe_evrlwi
4348 || icode == CODE_FOR_spe_evslwi
4349 || icode == CODE_FOR_spe_evsrwis
4350 || icode == CODE_FOR_spe_evsrwiu)
4352 /* Only allow 5-bit unsigned literals. */
4353 if (TREE_CODE (arg1) != INTEGER_CST
4354 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4356 error ("argument 2 must be a 5-bit unsigned literal")
4357 return const0_rtx;
/* Reuse TARGET only if it matches the output mode and predicate.  */
4361 if (target == 0
4362 || GET_MODE (target) != tmode
4363 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4364 target = gen_reg_rtx (tmode);
4366 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4367 op0 = copy_to_mode_reg (mode0, op0);
4368 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4369 op1 = copy_to_mode_reg (mode1, op1);
4371 pat = GEN_FCN (icode) (target, op0, op1);
4372 if (! pat)
4373 return 0;
4374 emit_insn (pat);
4376 return target;
/* Expand an AltiVec predicate builtin.  ARGLIST holds three arguments:
   a constant CR6 selector (0-3) followed by the two vectors to compare.
   The comparison insn ICODE writes its vector result to a scratch
   register and sets CR6; OPCODE is the assembler opcode string, passed
   through to the insn as a SYMBOL_REF.  The SImode TARGET receives the
   boolean extracted from CR6 according to the selector.  Returns the
   result rtx, 0 on pattern failure, or const0_rtx for bad arguments.  */
4379 static rtx
4380 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4381 enum insn_code icode;
4382 const char *opcode;
4383 tree arglist;
4384 rtx target;
4386 rtx pat, scratch;
4387 tree cr6_form = TREE_VALUE (arglist);
4388 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4389 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4390 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4391 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4392 enum machine_mode tmode = SImode;
4393 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4394 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4395 int cr6_form_int;
4397 if (TREE_CODE (cr6_form) != INTEGER_CST)
4399 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4400 return const0_rtx;
4402 else
4403 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
/* Both compare operands must be in the same vector mode.  */
4405 if (mode0 != mode1)
4406 abort ();
4408 /* If we have invalid arguments, bail out before generating bad rtl. */
4409 if (arg0 == error_mark_node || arg1 == error_mark_node)
4410 return const0_rtx;
4412 if (target == 0
4413 || GET_MODE (target) != tmode
4414 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4415 target = gen_reg_rtx (tmode);
4417 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4418 op0 = copy_to_mode_reg (mode0, op0);
4419 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4420 op1 = copy_to_mode_reg (mode1, op1);
/* The vector result of the compare is discarded; only CR6 matters.  */
4422 scratch = gen_reg_rtx (mode0);
4424 pat = GEN_FCN (icode) (scratch, op0, op1,
4425 gen_rtx (SYMBOL_REF, Pmode, opcode));
4426 if (! pat)
4427 return 0;
4428 emit_insn (pat);
4430 /* The vec_any* and vec_all* predicates use the same opcodes for two
4431 different operations, but the bits in CR6 will be different
4432 depending on what information we want. So we have to play tricks
4433 with CR6 to get the right bits out.
4435 If you think this is disgusting, look at the specs for the
4436 AltiVec predicates. */
4438 switch (cr6_form_int)
4440 case 0:
4441 emit_insn (gen_cr6_test_for_zero (target));
4442 break;
4443 case 1:
4444 emit_insn (gen_cr6_test_for_zero_reverse (target));
4445 break;
4446 case 2:
4447 emit_insn (gen_cr6_test_for_lt (target));
4448 break;
4449 case 3:
4450 emit_insn (gen_cr6_test_for_lt_reverse (target));
4451 break;
4452 default:
4453 error ("argument 1 of __builtin_altivec_predicate is out of range");
4454 break;
4457 return target;
/* Expand an AltiVec store-vector builtin.  ARGLIST holds three
   arguments; note they are permuted into the insn's operand order —
   the pattern is generated as (op1, op2, op0), and each operand is
   validated against the correspondingly permuted predicate and mode.
   Stores produce no value, so this always returns NULL_RTX (or
   const0_rtx for invalid arguments).  */
4460 static rtx
4461 altivec_expand_stv_builtin (icode, arglist)
4462 enum insn_code icode;
4463 tree arglist;
4465 tree arg0 = TREE_VALUE (arglist);
4466 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4467 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4468 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4469 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4470 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4471 rtx pat;
4472 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4473 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4474 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4476 /* Invalid arguments. Bail before doing anything stoopid! */
4477 if (arg0 == error_mark_node
4478 || arg1 == error_mark_node
4479 || arg2 == error_mark_node)
4480 return const0_rtx;
/* Validate each value against the insn operand slot it will occupy.  */
4482 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4483 op0 = copy_to_mode_reg (mode2, op0);
4484 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4485 op1 = copy_to_mode_reg (mode0, op1);
4486 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4487 op2 = copy_to_mode_reg (mode1, op2);
4489 pat = GEN_FCN (icode) (op1, op2, op0);
4490 if (pat)
4491 emit_insn (pat);
4492 return NULL_RTX;
/* Expand a three-operand builtin: TARGET = insn ICODE applied to the
   three arguments in ARGLIST.  The vsldoi variants require their third
   argument to be a 4-bit unsigned literal.  Returns the result rtx, 0
   if unsupported or the pattern fails, or const0_rtx after diagnosing
   bad arguments.  */
4495 static rtx
4496 rs6000_expand_ternop_builtin (icode, arglist, target)
4497 enum insn_code icode;
4498 tree arglist;
4499 rtx target;
4501 rtx pat;
4502 tree arg0 = TREE_VALUE (arglist);
4503 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4504 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4505 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4506 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4507 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4508 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4509 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4510 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4511 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4513 if (icode == CODE_FOR_nothing)
4514 /* Builtin not supported on this processor. */
4515 return 0;
4517 /* If we got invalid arguments bail out before generating bad rtl. */
4518 if (arg0 == error_mark_node
4519 || arg1 == error_mark_node
4520 || arg2 == error_mark_node)
4521 return const0_rtx;
/* vsldoi shifts by a 4-bit element count encoded in the insn.  */
4523 if (icode == CODE_FOR_altivec_vsldoi_4sf
4524 || icode == CODE_FOR_altivec_vsldoi_4si
4525 || icode == CODE_FOR_altivec_vsldoi_8hi
4526 || icode == CODE_FOR_altivec_vsldoi_16qi)
4528 /* Only allow 4-bit unsigned literals. */
4529 if (TREE_CODE (arg2) != INTEGER_CST
4530 || TREE_INT_CST_LOW (arg2) & ~0xf)
4532 error ("argument 3 must be a 4-bit unsigned literal");
4533 return const0_rtx;
/* Reuse TARGET only if it matches the output mode and predicate.  */
4537 if (target == 0
4538 || GET_MODE (target) != tmode
4539 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4540 target = gen_reg_rtx (tmode);
4542 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4543 op0 = copy_to_mode_reg (mode0, op0);
4544 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4545 op1 = copy_to_mode_reg (mode1, op1);
4546 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4547 op2 = copy_to_mode_reg (mode2, op2);
4549 pat = GEN_FCN (icode) (target, op0, op1, op2);
4550 if (! pat)
4551 return 0;
4552 emit_insn (pat);
4554 return target;
4557 /* Expand the lvx builtins. */
/* EXP is the CALL_EXPR; TARGET is a suggested result register.  Sets
   *EXPANDEDP to true and emits the load when the function code is one
   of the LD_INTERNAL builtins; otherwise sets *EXPANDEDP to false and
   returns NULL_RTX so the caller can try other expanders.  */
4558 static rtx
4559 altivec_expand_ld_builtin (exp, target, expandedp)
4560 tree exp;
4561 rtx target;
4562 bool *expandedp;
4564 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4565 tree arglist = TREE_OPERAND (exp, 1);
4566 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4567 tree arg0;
4568 enum machine_mode tmode, mode0;
4569 rtx pat, op0;
4570 enum insn_code icode;
/* Map the builtin code to the lvx insn for its vector mode.  */
4572 switch (fcode)
4574 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4575 icode = CODE_FOR_altivec_lvx_16qi;
4576 break;
4577 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4578 icode = CODE_FOR_altivec_lvx_8hi;
4579 break;
4580 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4581 icode = CODE_FOR_altivec_lvx_4si;
4582 break;
4583 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4584 icode = CODE_FOR_altivec_lvx_4sf;
4585 break;
4586 default:
4587 *expandedp = false;
4588 return NULL_RTX;
4591 *expandedp = true;
4593 arg0 = TREE_VALUE (arglist);
4594 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4595 tmode = insn_data[icode].operand[0].mode;
4596 mode0 = insn_data[icode].operand[1].mode;
4598 if (target == 0
4599 || GET_MODE (target) != tmode
4600 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4601 target = gen_reg_rtx (tmode);
/* The argument is a pointer; if it does not already satisfy the memory
   operand predicate, force it into a register and wrap it in a MEM.  */
4603 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4604 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4606 pat = GEN_FCN (icode) (target, op0);
4607 if (! pat)
4608 return 0;
4609 emit_insn (pat);
4610 return target;
4613 /* Expand the stvx builtins. */
4614 static rtx
4615 altivec_expand_st_builtin (exp, target, expandedp)
4616 tree exp;
4617 rtx target ATTRIBUTE_UNUSED;
4618 bool *expandedp;
4620 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4621 tree arglist = TREE_OPERAND (exp, 1);
4622 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4623 tree arg0, arg1;
4624 enum machine_mode mode0, mode1;
4625 rtx pat, op0, op1;
4626 enum insn_code icode;
4628 switch (fcode)
4630 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4631 icode = CODE_FOR_altivec_stvx_16qi;
4632 break;
4633 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4634 icode = CODE_FOR_altivec_stvx_8hi;
4635 break;
4636 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4637 icode = CODE_FOR_altivec_stvx_4si;
4638 break;
4639 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4640 icode = CODE_FOR_altivec_stvx_4sf;
4641 break;
4642 default:
4643 *expandedp = false;
4644 return NULL_RTX;
4647 arg0 = TREE_VALUE (arglist);
4648 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4649 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4650 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4651 mode0 = insn_data[icode].operand[0].mode;
4652 mode1 = insn_data[icode].operand[1].mode;
4654 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4655 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4656 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4657 op1 = copy_to_mode_reg (mode1, op1);
4659 pat = GEN_FCN (icode) (op0, op1);
4660 if (pat)
4661 emit_insn (pat);
4663 *expandedp = true;
4664 return NULL_RTX;
4667 /* Expand the dst builtins. */
4668 static rtx
4669 altivec_expand_dst_builtin (exp, target, expandedp)
4670 tree exp;
4671 rtx target ATTRIBUTE_UNUSED;
4672 bool *expandedp;
4674 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4675 tree arglist = TREE_OPERAND (exp, 1);
4676 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4677 tree arg0, arg1, arg2;
4678 enum machine_mode mode0, mode1, mode2;
4679 rtx pat, op0, op1, op2;
4680 struct builtin_description *d;
4681 size_t i;
4683 *expandedp = false;
4685 /* Handle DST variants. */
4686 d = (struct builtin_description *) bdesc_dst;
4687 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4688 if (d->code == fcode)
4690 arg0 = TREE_VALUE (arglist);
4691 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4692 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4693 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4694 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4695 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4696 mode0 = insn_data[d->icode].operand[0].mode;
4697 mode1 = insn_data[d->icode].operand[1].mode;
4698 mode2 = insn_data[d->icode].operand[2].mode;
4700 /* Invalid arguments, bail out before generating bad rtl. */
4701 if (arg0 == error_mark_node
4702 || arg1 == error_mark_node
4703 || arg2 == error_mark_node)
4704 return const0_rtx;
4706 if (TREE_CODE (arg2) != INTEGER_CST
4707 || TREE_INT_CST_LOW (arg2) & ~0x3)
4709 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4710 return const0_rtx;
4713 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4714 op0 = copy_to_mode_reg (mode0, op0);
4715 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4716 op1 = copy_to_mode_reg (mode1, op1);
4718 pat = GEN_FCN (d->icode) (op0, op1, op2);
4719 if (pat != 0)
4720 emit_insn (pat);
4722 *expandedp = true;
4723 return NULL_RTX;
4726 return NULL_RTX;
4729 /* Expand the builtin in EXP and store the result in TARGET. Store
4730 true in *EXPANDEDP if we found a builtin to expand. */
4731 static rtx
4732 altivec_expand_builtin (exp, target, expandedp)
4733 tree exp;
4734 rtx target;
4735 bool *expandedp;
4737 struct builtin_description *d;
4738 struct builtin_description_predicates *dp;
4739 size_t i;
4740 enum insn_code icode;
4741 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4742 tree arglist = TREE_OPERAND (exp, 1);
4743 tree arg0;
4744 rtx op0, pat;
4745 enum machine_mode tmode, mode0;
4746 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4748 target = altivec_expand_ld_builtin (exp, target, expandedp);
4749 if (*expandedp)
4750 return target;
4752 target = altivec_expand_st_builtin (exp, target, expandedp);
4753 if (*expandedp)
4754 return target;
4756 target = altivec_expand_dst_builtin (exp, target, expandedp);
4757 if (*expandedp)
4758 return target;
4760 *expandedp = true;
4762 switch (fcode)
4764 case ALTIVEC_BUILTIN_STVX:
4765 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4766 case ALTIVEC_BUILTIN_STVEBX:
4767 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4768 case ALTIVEC_BUILTIN_STVEHX:
4769 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4770 case ALTIVEC_BUILTIN_STVEWX:
4771 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4772 case ALTIVEC_BUILTIN_STVXL:
4773 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4775 case ALTIVEC_BUILTIN_MFVSCR:
4776 icode = CODE_FOR_altivec_mfvscr;
4777 tmode = insn_data[icode].operand[0].mode;
4779 if (target == 0
4780 || GET_MODE (target) != tmode
4781 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4782 target = gen_reg_rtx (tmode);
4784 pat = GEN_FCN (icode) (target);
4785 if (! pat)
4786 return 0;
4787 emit_insn (pat);
4788 return target;
4790 case ALTIVEC_BUILTIN_MTVSCR:
4791 icode = CODE_FOR_altivec_mtvscr;
4792 arg0 = TREE_VALUE (arglist);
4793 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4794 mode0 = insn_data[icode].operand[0].mode;
4796 /* If we got invalid arguments bail out before generating bad rtl. */
4797 if (arg0 == error_mark_node)
4798 return const0_rtx;
4800 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4801 op0 = copy_to_mode_reg (mode0, op0);
4803 pat = GEN_FCN (icode) (op0);
4804 if (pat)
4805 emit_insn (pat);
4806 return NULL_RTX;
4808 case ALTIVEC_BUILTIN_DSSALL:
4809 emit_insn (gen_altivec_dssall ());
4810 return NULL_RTX;
4812 case ALTIVEC_BUILTIN_DSS:
4813 icode = CODE_FOR_altivec_dss;
4814 arg0 = TREE_VALUE (arglist);
4815 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4816 mode0 = insn_data[icode].operand[0].mode;
4818 /* If we got invalid arguments bail out before generating bad rtl. */
4819 if (arg0 == error_mark_node)
4820 return const0_rtx;
4822 if (TREE_CODE (arg0) != INTEGER_CST
4823 || TREE_INT_CST_LOW (arg0) & ~0x3)
4825 error ("argument to dss must be a 2-bit unsigned literal");
4826 return const0_rtx;
4829 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4830 op0 = copy_to_mode_reg (mode0, op0);
4832 emit_insn (gen_altivec_dss (op0));
4833 return NULL_RTX;
4836 /* Expand abs* operations. */
4837 d = (struct builtin_description *) bdesc_abs;
4838 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4839 if (d->code == fcode)
4840 return altivec_expand_abs_builtin (d->icode, arglist, target);
4842 /* Expand the AltiVec predicates. */
4843 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4844 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4845 if (dp->code == fcode)
4846 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4848 /* LV* are funky. We initialized them differently. */
4849 switch (fcode)
4851 case ALTIVEC_BUILTIN_LVSL:
4852 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4853 arglist, target);
4854 case ALTIVEC_BUILTIN_LVSR:
4855 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4856 arglist, target);
4857 case ALTIVEC_BUILTIN_LVEBX:
4858 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4859 arglist, target);
4860 case ALTIVEC_BUILTIN_LVEHX:
4861 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4862 arglist, target);
4863 case ALTIVEC_BUILTIN_LVEWX:
4864 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4865 arglist, target);
4866 case ALTIVEC_BUILTIN_LVXL:
4867 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4868 arglist, target);
4869 case ALTIVEC_BUILTIN_LVX:
4870 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4871 arglist, target);
4872 default:
4873 break;
4874 /* Fall through. */
4877 *expandedp = false;
4878 return NULL_RTX;
4881 /* Binops that need to be initialized manually, but can be expanded
4882 automagically by rs6000_expand_binop_builtin. */
4883 static struct builtin_description bdesc_2arg_spe[] =
4885 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
4886 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
4887 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
4888 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
4889 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
4890 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
4891 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
4892 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
4893 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
4894 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
4895 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
4896 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
4897 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
4898 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
4899 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
4900 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
4901 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
4902 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
4903 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
4904 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
4905 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
4906 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
4909 /* Expand the builtin in EXP and store the result in TARGET. Store
4910 true in *EXPANDEDP if we found a builtin to expand.
4912 This expands the SPE builtins that are not simple unary and binary
4913 operations. */
4914 static rtx
4915 spe_expand_builtin (exp, target, expandedp)
4916 tree exp;
4917 rtx target;
4918 bool *expandedp;
4920 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4921 tree arglist = TREE_OPERAND (exp, 1);
4922 tree arg1, arg0;
4923 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4924 enum insn_code icode;
4925 enum machine_mode tmode, mode0;
4926 rtx pat, op0;
4927 struct builtin_description *d;
4928 size_t i;
4930 *expandedp = true;
4932 /* Syntax check for a 5-bit unsigned immediate. */
4933 switch (fcode)
4935 case SPE_BUILTIN_EVSTDD:
4936 case SPE_BUILTIN_EVSTDH:
4937 case SPE_BUILTIN_EVSTDW:
4938 case SPE_BUILTIN_EVSTWHE:
4939 case SPE_BUILTIN_EVSTWHO:
4940 case SPE_BUILTIN_EVSTWWE:
4941 case SPE_BUILTIN_EVSTWWO:
4942 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4943 if (TREE_CODE (arg1) != INTEGER_CST
4944 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4946 error ("argument 2 must be a 5-bit unsigned literal");
4947 return const0_rtx;
4949 break;
4950 default:
4951 break;
4954 d = (struct builtin_description *) bdesc_2arg_spe;
4955 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
4956 if (d->code == fcode)
4957 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4959 d = (struct builtin_description *) bdesc_spe_predicates;
4960 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
4961 if (d->code == fcode)
4962 return spe_expand_predicate_builtin (d->icode, arglist, target);
4964 d = (struct builtin_description *) bdesc_spe_evsel;
4965 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
4966 if (d->code == fcode)
4967 return spe_expand_evsel_builtin (d->icode, arglist, target);
4969 switch (fcode)
4971 case SPE_BUILTIN_EVSTDDX:
4972 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
4973 case SPE_BUILTIN_EVSTDHX:
4974 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
4975 case SPE_BUILTIN_EVSTDWX:
4976 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
4977 case SPE_BUILTIN_EVSTWHEX:
4978 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
4979 case SPE_BUILTIN_EVSTWHOX:
4980 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
4981 case SPE_BUILTIN_EVSTWWEX:
4982 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
4983 case SPE_BUILTIN_EVSTWWOX:
4984 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
4985 case SPE_BUILTIN_EVSTDD:
4986 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
4987 case SPE_BUILTIN_EVSTDH:
4988 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
4989 case SPE_BUILTIN_EVSTDW:
4990 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
4991 case SPE_BUILTIN_EVSTWHE:
4992 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
4993 case SPE_BUILTIN_EVSTWHO:
4994 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
4995 case SPE_BUILTIN_EVSTWWE:
4996 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
4997 case SPE_BUILTIN_EVSTWWO:
4998 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
4999 case SPE_BUILTIN_MFSPEFSCR:
5000 icode = CODE_FOR_spe_mfspefscr;
5001 tmode = insn_data[icode].operand[0].mode;
5003 if (target == 0
5004 || GET_MODE (target) != tmode
5005 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5006 target = gen_reg_rtx (tmode);
5008 pat = GEN_FCN (icode) (target);
5009 if (! pat)
5010 return 0;
5011 emit_insn (pat);
5012 return target;
5013 case SPE_BUILTIN_MTSPEFSCR:
5014 icode = CODE_FOR_spe_mtspefscr;
5015 arg0 = TREE_VALUE (arglist);
5016 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5017 mode0 = insn_data[icode].operand[0].mode;
5019 if (arg0 == error_mark_node)
5020 return const0_rtx;
5022 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5023 op0 = copy_to_mode_reg (mode0, op0);
5025 pat = GEN_FCN (icode) (op0);
5026 if (pat)
5027 emit_insn (pat);
5028 return NULL_RTX;
5029 default:
5030 break;
5033 *expandedp = false;
5034 return NULL_RTX;
5037 static rtx
5038 spe_expand_predicate_builtin (icode, arglist, target)
5039 enum insn_code icode;
5040 tree arglist;
5041 rtx target;
5043 rtx pat, scratch, tmp;
5044 tree form = TREE_VALUE (arglist);
5045 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5046 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5047 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5048 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5049 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5050 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5051 int form_int;
5052 enum rtx_code code;
5054 if (TREE_CODE (form) != INTEGER_CST)
5056 error ("argument 1 of __builtin_spe_predicate must be a constant");
5057 return const0_rtx;
5059 else
5060 form_int = TREE_INT_CST_LOW (form);
5062 if (mode0 != mode1)
5063 abort ();
5065 if (arg0 == error_mark_node || arg1 == error_mark_node)
5066 return const0_rtx;
5068 if (target == 0
5069 || GET_MODE (target) != SImode
5070 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5071 target = gen_reg_rtx (SImode);
5073 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5074 op0 = copy_to_mode_reg (mode0, op0);
5075 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5076 op1 = copy_to_mode_reg (mode1, op1);
5078 scratch = gen_reg_rtx (CCmode);
5080 pat = GEN_FCN (icode) (scratch, op0, op1);
5081 if (! pat)
5082 return const0_rtx;
5083 emit_insn (pat);
5085 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5086 _lower_. We use one compare, but look in different bits of the
5087 CR for each variant.
5089 There are 2 elements in each SPE simd type (upper/lower). The CR
5090 bits are set as follows:
5092 BIT0 | BIT 1 | BIT 2 | BIT 3
5093 U | L | (U | L) | (U & L)
5095 So, for an "all" relationship, BIT 3 would be set.
5096 For an "any" relationship, BIT 2 would be set. Etc.
5098 Following traditional nomenclature, these bits map to:
5100 BIT0 | BIT 1 | BIT 2 | BIT 3
5101 LT | GT | EQ | OV
5103 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5106 switch (form_int)
5108 /* All variant. OV bit. */
5109 case 0:
5110 /* We need to get to the OV bit, which is the ORDERED bit. We
5111 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5112 that's ugly and will trigger a validate_condition_mode abort.
5113 So let's just use another pattern. */
5114 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5115 return target;
5116 /* Any variant. EQ bit. */
5117 case 1:
5118 code = EQ;
5119 break;
5120 /* Upper variant. LT bit. */
5121 case 2:
5122 code = LT;
5123 break;
5124 /* Lower variant. GT bit. */
5125 case 3:
5126 code = GT;
5127 break;
5128 default:
5129 error ("argument 1 of __builtin_spe_predicate is out of range");
5130 return const0_rtx;
5133 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5134 emit_move_insn (target, tmp);
5136 return target;
5139 /* The evsel builtins look like this:
5141 e = __builtin_spe_evsel_OP (a, b, c, d);
5143 and work like this:
5145 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5146 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5149 static rtx
5150 spe_expand_evsel_builtin (icode, arglist, target)
5151 enum insn_code icode;
5152 tree arglist;
5153 rtx target;
5155 rtx pat, scratch;
5156 tree arg0 = TREE_VALUE (arglist);
5157 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5158 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5159 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5160 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5161 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5162 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5163 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5164 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5165 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5167 if (mode0 != mode1)
5168 abort ();
5170 if (arg0 == error_mark_node || arg1 == error_mark_node
5171 || arg2 == error_mark_node || arg3 == error_mark_node)
5172 return const0_rtx;
5174 if (target == 0
5175 || GET_MODE (target) != mode0
5176 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5177 target = gen_reg_rtx (mode0);
5179 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5180 op0 = copy_to_mode_reg (mode0, op0);
5181 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5182 op1 = copy_to_mode_reg (mode0, op1);
5183 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5184 op2 = copy_to_mode_reg (mode0, op2);
5185 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5186 op3 = copy_to_mode_reg (mode0, op3);
5188 /* Generate the compare. */
5189 scratch = gen_reg_rtx (CCmode);
5190 pat = GEN_FCN (icode) (scratch, op0, op1);
5191 if (! pat)
5192 return const0_rtx;
5193 emit_insn (pat);
5195 if (mode0 == V2SImode)
5196 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5197 else
5198 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5200 return target;
5203 /* Expand an expression EXP that calls a built-in function,
5204 with result going to TARGET if that's convenient
5205 (and in mode MODE if that's convenient).
5206 SUBTARGET may be used as the target for computing one of EXP's operands.
5207 IGNORE is nonzero if the value is to be ignored. */
5209 static rtx
5210 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5211 tree exp;
5212 rtx target;
5213 rtx subtarget ATTRIBUTE_UNUSED;
5214 enum machine_mode mode ATTRIBUTE_UNUSED;
5215 int ignore ATTRIBUTE_UNUSED;
5217 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5218 tree arglist = TREE_OPERAND (exp, 1);
5219 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5220 struct builtin_description *d;
5221 size_t i;
5222 rtx ret;
5223 bool success;
5225 if (TARGET_ALTIVEC)
5227 ret = altivec_expand_builtin (exp, target, &success);
5229 if (success)
5230 return ret;
5232 if (TARGET_SPE)
5234 ret = spe_expand_builtin (exp, target, &success);
5236 if (success)
5237 return ret;
5240 if (TARGET_ALTIVEC || TARGET_SPE)
5242 /* Handle simple unary operations. */
5243 d = (struct builtin_description *) bdesc_1arg;
5244 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5245 if (d->code == fcode)
5246 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5248 /* Handle simple binary operations. */
5249 d = (struct builtin_description *) bdesc_2arg;
5250 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5251 if (d->code == fcode)
5252 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5254 /* Handle simple ternary operations. */
5255 d = (struct builtin_description *) bdesc_3arg;
5256 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5257 if (d->code == fcode)
5258 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5261 abort ();
5262 return NULL_RTX;
5265 static void
5266 rs6000_init_builtins ()
5268 if (TARGET_SPE)
5269 spe_init_builtins ();
5270 if (TARGET_ALTIVEC)
5271 altivec_init_builtins ();
5272 if (TARGET_ALTIVEC || TARGET_SPE)
5273 rs6000_common_init_builtins ();
5276 /* Search through a set of builtins and enable the mask bits.
5277 DESC is an array of builtins.
5278 SIZE is the total number of builtins.
5279 START is the builtin enum at which to start.
5280 END is the builtin enum at which to end. */
5281 static void
5282 enable_mask_for_builtins (desc, size, start, end)
5283 struct builtin_description *desc;
5284 int size;
5285 enum rs6000_builtins start, end;
5287 int i;
5289 for (i = 0; i < size; ++i)
5290 if (desc[i].code == start)
5291 break;
5293 if (i == size)
5294 return;
5296 for (; i < size; ++i)
5298 /* Flip all the bits on. */
5299 desc[i].mask = target_flags;
5300 if (desc[i].code == end)
5301 break;
5305 static void
5306 spe_init_builtins ()
5308 tree endlink = void_list_node;
5309 tree puint_type_node = build_pointer_type (unsigned_type_node);
5310 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5311 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5312 struct builtin_description *d;
5313 size_t i;
5315 tree v2si_ftype_4_v2si
5316 = build_function_type
5317 (V2SI_type_node,
5318 tree_cons (NULL_TREE, V2SI_type_node,
5319 tree_cons (NULL_TREE, V2SI_type_node,
5320 tree_cons (NULL_TREE, V2SI_type_node,
5321 tree_cons (NULL_TREE, V2SI_type_node,
5322 endlink)))));
5324 tree v2sf_ftype_4_v2sf
5325 = build_function_type
5326 (V2SF_type_node,
5327 tree_cons (NULL_TREE, V2SF_type_node,
5328 tree_cons (NULL_TREE, V2SF_type_node,
5329 tree_cons (NULL_TREE, V2SF_type_node,
5330 tree_cons (NULL_TREE, V2SF_type_node,
5331 endlink)))));
5333 tree int_ftype_int_v2si_v2si
5334 = build_function_type
5335 (integer_type_node,
5336 tree_cons (NULL_TREE, integer_type_node,
5337 tree_cons (NULL_TREE, V2SI_type_node,
5338 tree_cons (NULL_TREE, V2SI_type_node,
5339 endlink))));
5341 tree int_ftype_int_v2sf_v2sf
5342 = build_function_type
5343 (integer_type_node,
5344 tree_cons (NULL_TREE, integer_type_node,
5345 tree_cons (NULL_TREE, V2SF_type_node,
5346 tree_cons (NULL_TREE, V2SF_type_node,
5347 endlink))));
5349 tree void_ftype_v2si_puint_int
5350 = build_function_type (void_type_node,
5351 tree_cons (NULL_TREE, V2SI_type_node,
5352 tree_cons (NULL_TREE, puint_type_node,
5353 tree_cons (NULL_TREE,
5354 integer_type_node,
5355 endlink))));
5357 tree void_ftype_v2si_puint_char
5358 = build_function_type (void_type_node,
5359 tree_cons (NULL_TREE, V2SI_type_node,
5360 tree_cons (NULL_TREE, puint_type_node,
5361 tree_cons (NULL_TREE,
5362 char_type_node,
5363 endlink))));
5365 tree void_ftype_v2si_pv2si_int
5366 = build_function_type (void_type_node,
5367 tree_cons (NULL_TREE, V2SI_type_node,
5368 tree_cons (NULL_TREE, pv2si_type_node,
5369 tree_cons (NULL_TREE,
5370 integer_type_node,
5371 endlink))));
5373 tree void_ftype_v2si_pv2si_char
5374 = build_function_type (void_type_node,
5375 tree_cons (NULL_TREE, V2SI_type_node,
5376 tree_cons (NULL_TREE, pv2si_type_node,
5377 tree_cons (NULL_TREE,
5378 char_type_node,
5379 endlink))));
5381 tree void_ftype_int
5382 = build_function_type (void_type_node,
5383 tree_cons (NULL_TREE, integer_type_node, endlink));
5385 tree int_ftype_void
5386 = build_function_type (integer_type_node,
5387 tree_cons (NULL_TREE, void_type_node, endlink));
5389 tree v2si_ftype_pv2si_int
5390 = build_function_type (V2SI_type_node,
5391 tree_cons (NULL_TREE, pv2si_type_node,
5392 tree_cons (NULL_TREE, integer_type_node,
5393 endlink)));
5395 tree v2si_ftype_puint_int
5396 = build_function_type (V2SI_type_node,
5397 tree_cons (NULL_TREE, puint_type_node,
5398 tree_cons (NULL_TREE, integer_type_node,
5399 endlink)));
5401 tree v2si_ftype_pushort_int
5402 = build_function_type (V2SI_type_node,
5403 tree_cons (NULL_TREE, pushort_type_node,
5404 tree_cons (NULL_TREE, integer_type_node,
5405 endlink)));
5407 /* The initialization of the simple binary and unary builtins is
5408 done in rs6000_common_init_builtins, but we have to enable the
5409 mask bits here manually because we have run out of `target_flags'
5410 bits. We really need to redesign this mask business. */
5412 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5413 ARRAY_SIZE (bdesc_2arg),
5414 SPE_BUILTIN_EVADDW,
5415 SPE_BUILTIN_EVXOR);
5416 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5417 ARRAY_SIZE (bdesc_1arg),
5418 SPE_BUILTIN_EVABS,
5419 SPE_BUILTIN_EVSUBFUSIAAW);
5420 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5421 ARRAY_SIZE (bdesc_spe_predicates),
5422 SPE_BUILTIN_EVCMPEQ,
5423 SPE_BUILTIN_EVFSTSTLT);
5424 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5425 ARRAY_SIZE (bdesc_spe_evsel),
5426 SPE_BUILTIN_EVSEL_CMPGTS,
5427 SPE_BUILTIN_EVSEL_FSTSTEQ);
5429 /* Initialize irregular SPE builtins. */
5431 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5432 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5433 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5434 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5435 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5436 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5437 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5438 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5439 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5440 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5441 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5442 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5443 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5444 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5445 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5446 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5448 /* Loads. */
5449 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5450 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5451 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5452 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5453 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5454 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5455 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5456 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5457 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5458 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5459 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5460 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5461 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5462 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5463 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5464 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5465 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5466 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5467 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5468 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5469 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5470 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5472 /* Predicates. */
5473 d = (struct builtin_description *) bdesc_spe_predicates;
5474 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5476 tree type;
5478 switch (insn_data[d->icode].operand[1].mode)
5480 case V2SImode:
5481 type = int_ftype_int_v2si_v2si;
5482 break;
5483 case V2SFmode:
5484 type = int_ftype_int_v2sf_v2sf;
5485 break;
5486 default:
5487 abort ();
5490 def_builtin (d->mask, d->name, type, d->code);
5493 /* Evsel predicates. */
5494 d = (struct builtin_description *) bdesc_spe_evsel;
5495 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5497 tree type;
5499 switch (insn_data[d->icode].operand[1].mode)
5501 case V2SImode:
5502 type = v2si_ftype_4_v2si;
5503 break;
5504 case V2SFmode:
5505 type = v2sf_ftype_4_v2sf;
5506 break;
5507 default:
5508 abort ();
5511 def_builtin (d->mask, d->name, type, d->code);
5515 static void
5516 altivec_init_builtins ()
5518 struct builtin_description *d;
5519 struct builtin_description_predicates *dp;
5520 size_t i;
5521 tree pfloat_type_node = build_pointer_type (float_type_node);
5522 tree pint_type_node = build_pointer_type (integer_type_node);
5523 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5524 tree pchar_type_node = build_pointer_type (char_type_node);
5526 tree pvoid_type_node = build_pointer_type (void_type_node);
5528 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
5529 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
5530 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
5531 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
5533 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
5535 tree int_ftype_int_v4si_v4si
5536 = build_function_type_list (integer_type_node,
5537 integer_type_node, V4SI_type_node,
5538 V4SI_type_node, NULL_TREE);
5539 tree v4sf_ftype_pcfloat
5540 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
5541 tree void_ftype_pfloat_v4sf
5542 = build_function_type_list (void_type_node,
5543 pfloat_type_node, V4SF_type_node, NULL_TREE);
5544 tree v4si_ftype_pcint
5545 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
5546 tree void_ftype_pint_v4si
5547 = build_function_type_list (void_type_node,
5548 pint_type_node, V4SI_type_node, NULL_TREE);
5549 tree v8hi_ftype_pcshort
5550 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
5551 tree void_ftype_pshort_v8hi
5552 = build_function_type_list (void_type_node,
5553 pshort_type_node, V8HI_type_node, NULL_TREE);
5554 tree v16qi_ftype_pcchar
5555 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
5556 tree void_ftype_pchar_v16qi
5557 = build_function_type_list (void_type_node,
5558 pchar_type_node, V16QI_type_node, NULL_TREE);
5559 tree void_ftype_v4si
5560 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5561 tree v8hi_ftype_void
5562 = build_function_type (V8HI_type_node, void_list_node);
5563 tree void_ftype_void
5564 = build_function_type (void_type_node, void_list_node);
5565 tree void_ftype_qi
5566 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5568 tree v16qi_ftype_int_pcvoid
5569 = build_function_type_list (V16QI_type_node,
5570 integer_type_node, pcvoid_type_node, NULL_TREE);
5571 tree v8hi_ftype_int_pcvoid
5572 = build_function_type_list (V8HI_type_node,
5573 integer_type_node, pcvoid_type_node, NULL_TREE);
5574 tree v4si_ftype_int_pcvoid
5575 = build_function_type_list (V4SI_type_node,
5576 integer_type_node, pcvoid_type_node, NULL_TREE);
5578 tree void_ftype_v4si_int_pvoid
5579 = build_function_type_list (void_type_node,
5580 V4SI_type_node, integer_type_node,
5581 pvoid_type_node, NULL_TREE);
5582 tree void_ftype_v16qi_int_pvoid
5583 = build_function_type_list (void_type_node,
5584 V16QI_type_node, integer_type_node,
5585 pvoid_type_node, NULL_TREE);
5586 tree void_ftype_v8hi_int_pvoid
5587 = build_function_type_list (void_type_node,
5588 V8HI_type_node, integer_type_node,
5589 pvoid_type_node, NULL_TREE);
5590 tree int_ftype_int_v8hi_v8hi
5591 = build_function_type_list (integer_type_node,
5592 integer_type_node, V8HI_type_node,
5593 V8HI_type_node, NULL_TREE);
5594 tree int_ftype_int_v16qi_v16qi
5595 = build_function_type_list (integer_type_node,
5596 integer_type_node, V16QI_type_node,
5597 V16QI_type_node, NULL_TREE);
5598 tree int_ftype_int_v4sf_v4sf
5599 = build_function_type_list (integer_type_node,
5600 integer_type_node, V4SF_type_node,
5601 V4SF_type_node, NULL_TREE);
5602 tree v4si_ftype_v4si
5603 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5604 tree v8hi_ftype_v8hi
5605 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5606 tree v16qi_ftype_v16qi
5607 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5608 tree v4sf_ftype_v4sf
5609 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5610 tree void_ftype_pcvoid_int_char
5611 = build_function_type_list (void_type_node,
5612 pcvoid_type_node, integer_type_node,
5613 char_type_node, NULL_TREE);
5615 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
5616 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5617 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
5618 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5619 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
5620 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5621 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
5622 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5623 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
5624 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5625 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
5626 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5627 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
5628 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5629 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
5630 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5631 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5632 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5633 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5634 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5635 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
5636 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
5637 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
5638 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
5639 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
5640 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
5641 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
5642 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5643 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5644 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5645 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5646 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5648 /* Add the DST variants. */
5649 d = (struct builtin_description *) bdesc_dst;
5650 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5651 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
5653 /* Initialize the predicates. */
5654 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5655 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5657 enum machine_mode mode1;
5658 tree type;
5660 mode1 = insn_data[dp->icode].operand[1].mode;
5662 switch (mode1)
5664 case V4SImode:
5665 type = int_ftype_int_v4si_v4si;
5666 break;
5667 case V8HImode:
5668 type = int_ftype_int_v8hi_v8hi;
5669 break;
5670 case V16QImode:
5671 type = int_ftype_int_v16qi_v16qi;
5672 break;
5673 case V4SFmode:
5674 type = int_ftype_int_v4sf_v4sf;
5675 break;
5676 default:
5677 abort ();
5680 def_builtin (dp->mask, dp->name, type, dp->code);
5683 /* Initialize the abs* operators. */
5684 d = (struct builtin_description *) bdesc_abs;
5685 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5687 enum machine_mode mode0;
5688 tree type;
5690 mode0 = insn_data[d->icode].operand[0].mode;
5692 switch (mode0)
5694 case V4SImode:
5695 type = v4si_ftype_v4si;
5696 break;
5697 case V8HImode:
5698 type = v8hi_ftype_v8hi;
5699 break;
5700 case V16QImode:
5701 type = v16qi_ftype_v16qi;
5702 break;
5703 case V4SFmode:
5704 type = v4sf_ftype_v4sf;
5705 break;
5706 default:
5707 abort ();
5710 def_builtin (d->mask, d->name, type, d->code);
5714 static void
5715 rs6000_common_init_builtins ()
5717 struct builtin_description *d;
5718 size_t i;
5720 tree v4sf_ftype_v4sf_v4sf_v16qi
5721 = build_function_type_list (V4SF_type_node,
5722 V4SF_type_node, V4SF_type_node,
5723 V16QI_type_node, NULL_TREE);
5724 tree v4si_ftype_v4si_v4si_v16qi
5725 = build_function_type_list (V4SI_type_node,
5726 V4SI_type_node, V4SI_type_node,
5727 V16QI_type_node, NULL_TREE);
5728 tree v8hi_ftype_v8hi_v8hi_v16qi
5729 = build_function_type_list (V8HI_type_node,
5730 V8HI_type_node, V8HI_type_node,
5731 V16QI_type_node, NULL_TREE);
5732 tree v16qi_ftype_v16qi_v16qi_v16qi
5733 = build_function_type_list (V16QI_type_node,
5734 V16QI_type_node, V16QI_type_node,
5735 V16QI_type_node, NULL_TREE);
5736 tree v4si_ftype_char
5737 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5738 tree v8hi_ftype_char
5739 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5740 tree v16qi_ftype_char
5741 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5742 tree v8hi_ftype_v16qi
5743 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5744 tree v4sf_ftype_v4sf
5745 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5747 tree v2si_ftype_v2si_v2si
5748 = build_function_type_list (V2SI_type_node,
5749 V2SI_type_node, V2SI_type_node, NULL_TREE);
5751 tree v2sf_ftype_v2sf_v2sf
5752 = build_function_type_list (V2SF_type_node,
5753 V2SF_type_node, V2SF_type_node, NULL_TREE);
5755 tree v2si_ftype_int_int
5756 = build_function_type_list (V2SI_type_node,
5757 integer_type_node, integer_type_node,
5758 NULL_TREE);
5760 tree v2si_ftype_v2si
5761 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5763 tree v2sf_ftype_v2sf
5764 = build_function_type_list (V2SF_type_node,
5765 V2SF_type_node, NULL_TREE);
5767 tree v2sf_ftype_v2si
5768 = build_function_type_list (V2SF_type_node,
5769 V2SI_type_node, NULL_TREE);
5771 tree v2si_ftype_v2sf
5772 = build_function_type_list (V2SI_type_node,
5773 V2SF_type_node, NULL_TREE);
5775 tree v2si_ftype_v2si_char
5776 = build_function_type_list (V2SI_type_node,
5777 V2SI_type_node, char_type_node, NULL_TREE);
5779 tree v2si_ftype_int_char
5780 = build_function_type_list (V2SI_type_node,
5781 integer_type_node, char_type_node, NULL_TREE);
5783 tree v2si_ftype_char
5784 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5786 tree int_ftype_int_int
5787 = build_function_type_list (integer_type_node,
5788 integer_type_node, integer_type_node,
5789 NULL_TREE);
5791 tree v4si_ftype_v4si_v4si
5792 = build_function_type_list (V4SI_type_node,
5793 V4SI_type_node, V4SI_type_node, NULL_TREE);
5794 tree v4sf_ftype_v4si_char
5795 = build_function_type_list (V4SF_type_node,
5796 V4SI_type_node, char_type_node, NULL_TREE);
5797 tree v4si_ftype_v4sf_char
5798 = build_function_type_list (V4SI_type_node,
5799 V4SF_type_node, char_type_node, NULL_TREE);
5800 tree v4si_ftype_v4si_char
5801 = build_function_type_list (V4SI_type_node,
5802 V4SI_type_node, char_type_node, NULL_TREE);
5803 tree v8hi_ftype_v8hi_char
5804 = build_function_type_list (V8HI_type_node,
5805 V8HI_type_node, char_type_node, NULL_TREE);
5806 tree v16qi_ftype_v16qi_char
5807 = build_function_type_list (V16QI_type_node,
5808 V16QI_type_node, char_type_node, NULL_TREE);
5809 tree v16qi_ftype_v16qi_v16qi_char
5810 = build_function_type_list (V16QI_type_node,
5811 V16QI_type_node, V16QI_type_node,
5812 char_type_node, NULL_TREE);
5813 tree v8hi_ftype_v8hi_v8hi_char
5814 = build_function_type_list (V8HI_type_node,
5815 V8HI_type_node, V8HI_type_node,
5816 char_type_node, NULL_TREE);
5817 tree v4si_ftype_v4si_v4si_char
5818 = build_function_type_list (V4SI_type_node,
5819 V4SI_type_node, V4SI_type_node,
5820 char_type_node, NULL_TREE);
5821 tree v4sf_ftype_v4sf_v4sf_char
5822 = build_function_type_list (V4SF_type_node,
5823 V4SF_type_node, V4SF_type_node,
5824 char_type_node, NULL_TREE);
5825 tree v4sf_ftype_v4sf_v4sf
5826 = build_function_type_list (V4SF_type_node,
5827 V4SF_type_node, V4SF_type_node, NULL_TREE);
5828 tree v4sf_ftype_v4sf_v4sf_v4si
5829 = build_function_type_list (V4SF_type_node,
5830 V4SF_type_node, V4SF_type_node,
5831 V4SI_type_node, NULL_TREE);
5832 tree v4sf_ftype_v4sf_v4sf_v4sf
5833 = build_function_type_list (V4SF_type_node,
5834 V4SF_type_node, V4SF_type_node,
5835 V4SF_type_node, NULL_TREE);
5836 tree v4si_ftype_v4si_v4si_v4si
5837 = build_function_type_list (V4SI_type_node,
5838 V4SI_type_node, V4SI_type_node,
5839 V4SI_type_node, NULL_TREE);
5840 tree v8hi_ftype_v8hi_v8hi
5841 = build_function_type_list (V8HI_type_node,
5842 V8HI_type_node, V8HI_type_node, NULL_TREE);
5843 tree v8hi_ftype_v8hi_v8hi_v8hi
5844 = build_function_type_list (V8HI_type_node,
5845 V8HI_type_node, V8HI_type_node,
5846 V8HI_type_node, NULL_TREE);
5847 tree v4si_ftype_v8hi_v8hi_v4si
5848 = build_function_type_list (V4SI_type_node,
5849 V8HI_type_node, V8HI_type_node,
5850 V4SI_type_node, NULL_TREE);
5851 tree v4si_ftype_v16qi_v16qi_v4si
5852 = build_function_type_list (V4SI_type_node,
5853 V16QI_type_node, V16QI_type_node,
5854 V4SI_type_node, NULL_TREE);
5855 tree v16qi_ftype_v16qi_v16qi
5856 = build_function_type_list (V16QI_type_node,
5857 V16QI_type_node, V16QI_type_node, NULL_TREE);
5858 tree v4si_ftype_v4sf_v4sf
5859 = build_function_type_list (V4SI_type_node,
5860 V4SF_type_node, V4SF_type_node, NULL_TREE);
5861 tree v8hi_ftype_v16qi_v16qi
5862 = build_function_type_list (V8HI_type_node,
5863 V16QI_type_node, V16QI_type_node, NULL_TREE);
5864 tree v4si_ftype_v8hi_v8hi
5865 = build_function_type_list (V4SI_type_node,
5866 V8HI_type_node, V8HI_type_node, NULL_TREE);
5867 tree v8hi_ftype_v4si_v4si
5868 = build_function_type_list (V8HI_type_node,
5869 V4SI_type_node, V4SI_type_node, NULL_TREE);
5870 tree v16qi_ftype_v8hi_v8hi
5871 = build_function_type_list (V16QI_type_node,
5872 V8HI_type_node, V8HI_type_node, NULL_TREE);
5873 tree v4si_ftype_v16qi_v4si
5874 = build_function_type_list (V4SI_type_node,
5875 V16QI_type_node, V4SI_type_node, NULL_TREE);
5876 tree v4si_ftype_v16qi_v16qi
5877 = build_function_type_list (V4SI_type_node,
5878 V16QI_type_node, V16QI_type_node, NULL_TREE);
5879 tree v4si_ftype_v8hi_v4si
5880 = build_function_type_list (V4SI_type_node,
5881 V8HI_type_node, V4SI_type_node, NULL_TREE);
5882 tree v4si_ftype_v8hi
5883 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5884 tree int_ftype_v4si_v4si
5885 = build_function_type_list (integer_type_node,
5886 V4SI_type_node, V4SI_type_node, NULL_TREE);
5887 tree int_ftype_v4sf_v4sf
5888 = build_function_type_list (integer_type_node,
5889 V4SF_type_node, V4SF_type_node, NULL_TREE);
5890 tree int_ftype_v16qi_v16qi
5891 = build_function_type_list (integer_type_node,
5892 V16QI_type_node, V16QI_type_node, NULL_TREE);
5893 tree int_ftype_v8hi_v8hi
5894 = build_function_type_list (integer_type_node,
5895 V8HI_type_node, V8HI_type_node, NULL_TREE);
5897 /* Add the simple ternary operators. */
5898 d = (struct builtin_description *) bdesc_3arg;
5899 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5902 enum machine_mode mode0, mode1, mode2, mode3;
5903 tree type;
5905 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5906 continue;
5908 mode0 = insn_data[d->icode].operand[0].mode;
5909 mode1 = insn_data[d->icode].operand[1].mode;
5910 mode2 = insn_data[d->icode].operand[2].mode;
5911 mode3 = insn_data[d->icode].operand[3].mode;
5913 /* When all four are of the same mode. */
5914 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5916 switch (mode0)
5918 case V4SImode:
5919 type = v4si_ftype_v4si_v4si_v4si;
5920 break;
5921 case V4SFmode:
5922 type = v4sf_ftype_v4sf_v4sf_v4sf;
5923 break;
5924 case V8HImode:
5925 type = v8hi_ftype_v8hi_v8hi_v8hi;
5926 break;
5927 case V16QImode:
5928 type = v16qi_ftype_v16qi_v16qi_v16qi;
5929 break;
5930 default:
5931 abort();
5934 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5936 switch (mode0)
5938 case V4SImode:
5939 type = v4si_ftype_v4si_v4si_v16qi;
5940 break;
5941 case V4SFmode:
5942 type = v4sf_ftype_v4sf_v4sf_v16qi;
5943 break;
5944 case V8HImode:
5945 type = v8hi_ftype_v8hi_v8hi_v16qi;
5946 break;
5947 case V16QImode:
5948 type = v16qi_ftype_v16qi_v16qi_v16qi;
5949 break;
5950 default:
5951 abort();
5954 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5955 && mode3 == V4SImode)
5956 type = v4si_ftype_v16qi_v16qi_v4si;
5957 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5958 && mode3 == V4SImode)
5959 type = v4si_ftype_v8hi_v8hi_v4si;
5960 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5961 && mode3 == V4SImode)
5962 type = v4sf_ftype_v4sf_v4sf_v4si;
5964 /* vchar, vchar, vchar, 4 bit literal. */
5965 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5966 && mode3 == QImode)
5967 type = v16qi_ftype_v16qi_v16qi_char;
5969 /* vshort, vshort, vshort, 4 bit literal. */
5970 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5971 && mode3 == QImode)
5972 type = v8hi_ftype_v8hi_v8hi_char;
5974 /* vint, vint, vint, 4 bit literal. */
5975 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5976 && mode3 == QImode)
5977 type = v4si_ftype_v4si_v4si_char;
5979 /* vfloat, vfloat, vfloat, 4 bit literal. */
5980 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5981 && mode3 == QImode)
5982 type = v4sf_ftype_v4sf_v4sf_char;
5984 else
5985 abort ();
5987 def_builtin (d->mask, d->name, type, d->code);
5990 /* Add the simple binary operators. */
5991 d = (struct builtin_description *) bdesc_2arg;
5992 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5994 enum machine_mode mode0, mode1, mode2;
5995 tree type;
5997 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5998 continue;
6000 mode0 = insn_data[d->icode].operand[0].mode;
6001 mode1 = insn_data[d->icode].operand[1].mode;
6002 mode2 = insn_data[d->icode].operand[2].mode;
6004 /* When all three operands are of the same mode. */
6005 if (mode0 == mode1 && mode1 == mode2)
6007 switch (mode0)
6009 case V4SFmode:
6010 type = v4sf_ftype_v4sf_v4sf;
6011 break;
6012 case V4SImode:
6013 type = v4si_ftype_v4si_v4si;
6014 break;
6015 case V16QImode:
6016 type = v16qi_ftype_v16qi_v16qi;
6017 break;
6018 case V8HImode:
6019 type = v8hi_ftype_v8hi_v8hi;
6020 break;
6021 case V2SImode:
6022 type = v2si_ftype_v2si_v2si;
6023 break;
6024 case V2SFmode:
6025 type = v2sf_ftype_v2sf_v2sf;
6026 break;
6027 case SImode:
6028 type = int_ftype_int_int;
6029 break;
6030 default:
6031 abort ();
6035 /* A few other combos we really don't want to do manually. */
6037 /* vint, vfloat, vfloat. */
6038 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6039 type = v4si_ftype_v4sf_v4sf;
6041 /* vshort, vchar, vchar. */
6042 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6043 type = v8hi_ftype_v16qi_v16qi;
6045 /* vint, vshort, vshort. */
6046 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6047 type = v4si_ftype_v8hi_v8hi;
6049 /* vshort, vint, vint. */
6050 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6051 type = v8hi_ftype_v4si_v4si;
6053 /* vchar, vshort, vshort. */
6054 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6055 type = v16qi_ftype_v8hi_v8hi;
6057 /* vint, vchar, vint. */
6058 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6059 type = v4si_ftype_v16qi_v4si;
6061 /* vint, vchar, vchar. */
6062 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6063 type = v4si_ftype_v16qi_v16qi;
6065 /* vint, vshort, vint. */
6066 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6067 type = v4si_ftype_v8hi_v4si;
6069 /* vint, vint, 5 bit literal. */
6070 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6071 type = v4si_ftype_v4si_char;
6073 /* vshort, vshort, 5 bit literal. */
6074 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6075 type = v8hi_ftype_v8hi_char;
6077 /* vchar, vchar, 5 bit literal. */
6078 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6079 type = v16qi_ftype_v16qi_char;
6081 /* vfloat, vint, 5 bit literal. */
6082 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6083 type = v4sf_ftype_v4si_char;
6085 /* vint, vfloat, 5 bit literal. */
6086 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6087 type = v4si_ftype_v4sf_char;
6089 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6090 type = v2si_ftype_int_int;
6092 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6093 type = v2si_ftype_v2si_char;
6095 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6096 type = v2si_ftype_int_char;
6098 /* int, x, x. */
6099 else if (mode0 == SImode)
6101 switch (mode1)
6103 case V4SImode:
6104 type = int_ftype_v4si_v4si;
6105 break;
6106 case V4SFmode:
6107 type = int_ftype_v4sf_v4sf;
6108 break;
6109 case V16QImode:
6110 type = int_ftype_v16qi_v16qi;
6111 break;
6112 case V8HImode:
6113 type = int_ftype_v8hi_v8hi;
6114 break;
6115 default:
6116 abort ();
6120 else
6121 abort ();
6123 def_builtin (d->mask, d->name, type, d->code);
6126 /* Add the simple unary operators. */
6127 d = (struct builtin_description *) bdesc_1arg;
6128 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6130 enum machine_mode mode0, mode1;
6131 tree type;
6133 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6134 continue;
6136 mode0 = insn_data[d->icode].operand[0].mode;
6137 mode1 = insn_data[d->icode].operand[1].mode;
6139 if (mode0 == V4SImode && mode1 == QImode)
6140 type = v4si_ftype_char;
6141 else if (mode0 == V8HImode && mode1 == QImode)
6142 type = v8hi_ftype_char;
6143 else if (mode0 == V16QImode && mode1 == QImode)
6144 type = v16qi_ftype_char;
6145 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6146 type = v4sf_ftype_v4sf;
6147 else if (mode0 == V8HImode && mode1 == V16QImode)
6148 type = v8hi_ftype_v16qi;
6149 else if (mode0 == V4SImode && mode1 == V8HImode)
6150 type = v4si_ftype_v8hi;
6151 else if (mode0 == V2SImode && mode1 == V2SImode)
6152 type = v2si_ftype_v2si;
6153 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6154 type = v2sf_ftype_v2sf;
6155 else if (mode0 == V2SFmode && mode1 == V2SImode)
6156 type = v2sf_ftype_v2si;
6157 else if (mode0 == V2SImode && mode1 == V2SFmode)
6158 type = v2si_ftype_v2sf;
6159 else if (mode0 == V2SImode && mode1 == QImode)
6160 type = v2si_ftype_char;
6161 else
6162 abort ();
6164 def_builtin (d->mask, d->name, type, d->code);
/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */
6177 #define MAX_MOVE_REG 4
6180 expand_block_move (operands)
6181 rtx operands[];
6183 rtx orig_dest = operands[0];
6184 rtx orig_src = operands[1];
6185 rtx bytes_rtx = operands[2];
6186 rtx align_rtx = operands[3];
6187 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6188 int align;
6189 int bytes;
6190 int offset;
6191 int move_bytes;
6193 /* If this is not a fixed size move, just call memcpy */
6194 if (! constp)
6195 return 0;
6197 /* If this is not a fixed size alignment, abort */
6198 if (GET_CODE (align_rtx) != CONST_INT)
6199 abort ();
6200 align = INTVAL (align_rtx);
6202 /* Anything to move? */
6203 bytes = INTVAL (bytes_rtx);
6204 if (bytes <= 0)
6205 return 1;
6207 /* store_one_arg depends on expand_block_move to handle at least the size of
6208 reg_parm_stack_space. */
6209 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6210 return 0;
6212 if (TARGET_STRING) /* string instructions are available */
6214 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6216 union {
6217 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6218 rtx (*mov) PARAMS ((rtx, rtx));
6219 } gen_func;
6220 enum machine_mode mode = BLKmode;
6221 rtx src, dest;
6223 if (bytes > 24 /* move up to 32 bytes at a time */
6224 && ! fixed_regs[5]
6225 && ! fixed_regs[6]
6226 && ! fixed_regs[7]
6227 && ! fixed_regs[8]
6228 && ! fixed_regs[9]
6229 && ! fixed_regs[10]
6230 && ! fixed_regs[11]
6231 && ! fixed_regs[12])
6233 move_bytes = (bytes > 32) ? 32 : bytes;
6234 gen_func.movstrsi = gen_movstrsi_8reg;
6236 else if (bytes > 16 /* move up to 24 bytes at a time */
6237 && ! fixed_regs[5]
6238 && ! fixed_regs[6]
6239 && ! fixed_regs[7]
6240 && ! fixed_regs[8]
6241 && ! fixed_regs[9]
6242 && ! fixed_regs[10])
6244 move_bytes = (bytes > 24) ? 24 : bytes;
6245 gen_func.movstrsi = gen_movstrsi_6reg;
6247 else if (bytes > 8 /* move up to 16 bytes at a time */
6248 && ! fixed_regs[5]
6249 && ! fixed_regs[6]
6250 && ! fixed_regs[7]
6251 && ! fixed_regs[8])
6253 move_bytes = (bytes > 16) ? 16 : bytes;
6254 gen_func.movstrsi = gen_movstrsi_4reg;
6256 else if (bytes >= 8 && TARGET_POWERPC64
6257 /* 64-bit loads and stores require word-aligned
6258 displacements. */
6259 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6261 move_bytes = 8;
6262 mode = DImode;
6263 gen_func.mov = gen_movdi;
6265 else if (bytes > 4 && !TARGET_POWERPC64)
6266 { /* move up to 8 bytes at a time */
6267 move_bytes = (bytes > 8) ? 8 : bytes;
6268 gen_func.movstrsi = gen_movstrsi_2reg;
6270 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6271 { /* move 4 bytes */
6272 move_bytes = 4;
6273 mode = SImode;
6274 gen_func.mov = gen_movsi;
6276 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6277 { /* move 2 bytes */
6278 move_bytes = 2;
6279 mode = HImode;
6280 gen_func.mov = gen_movhi;
6282 else if (bytes == 1) /* move 1 byte */
6284 move_bytes = 1;
6285 mode = QImode;
6286 gen_func.mov = gen_movqi;
6288 else
6289 { /* move up to 4 bytes at a time */
6290 move_bytes = (bytes > 4) ? 4 : bytes;
6291 gen_func.movstrsi = gen_movstrsi_1reg;
6294 src = adjust_address (orig_src, mode, offset);
6295 dest = adjust_address (orig_dest, mode, offset);
6297 if (mode == BLKmode)
6299 /* Move the address into scratch registers. The movstrsi
6300 patterns require zero offset. */
6301 if (!REG_P (XEXP (src, 0)))
6303 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6304 src = replace_equiv_address (src, src_reg);
6306 set_mem_size (src, GEN_INT (move_bytes));
6308 if (!REG_P (XEXP (dest, 0)))
6310 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6311 dest = replace_equiv_address (dest, dest_reg);
6313 set_mem_size (dest, GEN_INT (move_bytes));
6315 emit_insn ((*gen_func.movstrsi) (dest, src,
6316 GEN_INT (move_bytes & 31),
6317 align_rtx));
6319 else
6321 rtx tmp_reg = gen_reg_rtx (mode);
6323 emit_insn ((*gen_func.mov) (tmp_reg, src));
6324 emit_insn ((*gen_func.mov) (dest, tmp_reg));
6329 else /* string instructions not available */
6331 rtx stores[MAX_MOVE_REG];
6332 int num_reg = 0;
6333 int i;
6335 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6337 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
6338 enum machine_mode mode;
6339 rtx src, dest, tmp_reg;
6341 /* Generate the appropriate load and store, saving the stores
6342 for later. */
6343 if (bytes >= 8 && TARGET_POWERPC64
6344 /* 64-bit loads and stores require word-aligned
6345 displacements. */
6346 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6348 move_bytes = 8;
6349 mode = DImode;
6350 gen_mov_func = gen_movdi;
6352 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6354 move_bytes = 4;
6355 mode = SImode;
6356 gen_mov_func = gen_movsi;
6358 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6360 move_bytes = 2;
6361 mode = HImode;
6362 gen_mov_func = gen_movhi;
6364 else
6366 move_bytes = 1;
6367 mode = QImode;
6368 gen_mov_func = gen_movqi;
6371 src = adjust_address (orig_src, mode, offset);
6372 dest = adjust_address (orig_dest, mode, offset);
6373 tmp_reg = gen_reg_rtx (mode);
6375 emit_insn ((*gen_mov_func) (tmp_reg, src));
6376 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
6378 if (num_reg >= MAX_MOVE_REG)
6380 for (i = 0; i < num_reg; i++)
6381 emit_insn (stores[i]);
6382 num_reg = 0;
6386 for (i = 0; i < num_reg; i++)
6387 emit_insn (stores[i]);
6390 return 1;
/* Return 1 if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  */
6398 load_multiple_operation (op, mode)
6399 rtx op;
6400 enum machine_mode mode ATTRIBUTE_UNUSED;
6402 int count = XVECLEN (op, 0);
6403 unsigned int dest_regno;
6404 rtx src_addr;
6405 int i;
6407 /* Perform a quick check so we don't blow up below. */
6408 if (count <= 1
6409 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6410 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6411 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6412 return 0;
6414 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6415 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6417 for (i = 1; i < count; i++)
6419 rtx elt = XVECEXP (op, 0, i);
6421 if (GET_CODE (elt) != SET
6422 || GET_CODE (SET_DEST (elt)) != REG
6423 || GET_MODE (SET_DEST (elt)) != SImode
6424 || REGNO (SET_DEST (elt)) != dest_regno + i
6425 || GET_CODE (SET_SRC (elt)) != MEM
6426 || GET_MODE (SET_SRC (elt)) != SImode
6427 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6428 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6429 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6430 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6431 return 0;
6434 return 1;
/* Similar, but tests for store multiple.  Here, the second vector element
   is a CLOBBER.  It will be tested later.  */
6441 store_multiple_operation (op, mode)
6442 rtx op;
6443 enum machine_mode mode ATTRIBUTE_UNUSED;
6445 int count = XVECLEN (op, 0) - 1;
6446 unsigned int src_regno;
6447 rtx dest_addr;
6448 int i;
6450 /* Perform a quick check so we don't blow up below. */
6451 if (count <= 1
6452 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6453 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6454 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6455 return 0;
6457 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6458 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6460 for (i = 1; i < count; i++)
6462 rtx elt = XVECEXP (op, 0, i + 1);
6464 if (GET_CODE (elt) != SET
6465 || GET_CODE (SET_SRC (elt)) != REG
6466 || GET_MODE (SET_SRC (elt)) != SImode
6467 || REGNO (SET_SRC (elt)) != src_regno + i
6468 || GET_CODE (SET_DEST (elt)) != MEM
6469 || GET_MODE (SET_DEST (elt)) != SImode
6470 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6471 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6472 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6473 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6474 return 0;
6477 return 1;
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */
6485 const char *
6486 rs6000_output_load_multiple (operands)
6487 rtx operands[3];
6489 /* We have to handle the case where the pseudo used to contain the address
6490 is assigned to one of the output registers. */
6491 int i, j;
6492 int words = XVECLEN (operands[0], 0);
6493 rtx xop[10];
6495 if (XVECLEN (operands[0], 0) == 1)
6496 return "{l|lwz} %2,0(%1)";
6498 for (i = 0; i < words; i++)
6499 if (refers_to_regno_p (REGNO (operands[2]) + i,
6500 REGNO (operands[2]) + i + 1, operands[1], 0))
6502 if (i == words-1)
6504 xop[0] = GEN_INT (4 * (words-1));
6505 xop[1] = operands[1];
6506 xop[2] = operands[2];
6507 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
6508 return "";
6510 else if (i == 0)
6512 xop[0] = GEN_INT (4 * (words-1));
6513 xop[1] = operands[1];
6514 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6515 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
6516 return "";
6518 else
6520 for (j = 0; j < words; j++)
6521 if (j != i)
6523 xop[0] = GEN_INT (j * 4);
6524 xop[1] = operands[1];
6525 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
6526 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
6528 xop[0] = GEN_INT (i * 4);
6529 xop[1] = operands[1];
6530 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
6531 return "";
6535 return "{lsi|lswi} %2,%1,%N0";
6538 /* Return 1 for a parallel vrsave operation. */
6541 vrsave_operation (op, mode)
6542 rtx op;
6543 enum machine_mode mode ATTRIBUTE_UNUSED;
6545 int count = XVECLEN (op, 0);
6546 unsigned int dest_regno, src_regno;
6547 int i;
6549 if (count <= 1
6550 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6551 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6552 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6553 return 0;
6555 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6556 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6558 if (dest_regno != VRSAVE_REGNO
6559 && src_regno != VRSAVE_REGNO)
6560 return 0;
6562 for (i = 1; i < count; i++)
6564 rtx elt = XVECEXP (op, 0, i);
6566 if (GET_CODE (elt) != CLOBBER
6567 && GET_CODE (elt) != SET)
6568 return 0;
6571 return 1;
6574 /* Return 1 for an PARALLEL suitable for mtcrf. */
6577 mtcrf_operation (op, mode)
6578 rtx op;
6579 enum machine_mode mode ATTRIBUTE_UNUSED;
6581 int count = XVECLEN (op, 0);
6582 int i;
6583 rtx src_reg;
6585 /* Perform a quick check so we don't blow up below. */
6586 if (count < 1
6587 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6588 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6589 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6590 return 0;
6591 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6593 if (GET_CODE (src_reg) != REG
6594 || GET_MODE (src_reg) != SImode
6595 || ! INT_REGNO_P (REGNO (src_reg)))
6596 return 0;
6598 for (i = 0; i < count; i++)
6600 rtx exp = XVECEXP (op, 0, i);
6601 rtx unspec;
6602 int maskval;
6604 if (GET_CODE (exp) != SET
6605 || GET_CODE (SET_DEST (exp)) != REG
6606 || GET_MODE (SET_DEST (exp)) != CCmode
6607 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6608 return 0;
6609 unspec = SET_SRC (exp);
6610 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6612 if (GET_CODE (unspec) != UNSPEC
6613 || XINT (unspec, 1) != 20
6614 || XVECLEN (unspec, 0) != 2
6615 || XVECEXP (unspec, 0, 0) != src_reg
6616 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6617 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6618 return 0;
6620 return 1;
6623 /* Return 1 for an PARALLEL suitable for lmw. */
6626 lmw_operation (op, mode)
6627 rtx op;
6628 enum machine_mode mode ATTRIBUTE_UNUSED;
6630 int count = XVECLEN (op, 0);
6631 unsigned int dest_regno;
6632 rtx src_addr;
6633 unsigned int base_regno;
6634 HOST_WIDE_INT offset;
6635 int i;
6637 /* Perform a quick check so we don't blow up below. */
6638 if (count <= 1
6639 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6640 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6641 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6642 return 0;
6644 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6645 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6647 if (dest_regno > 31
6648 || count != 32 - (int) dest_regno)
6649 return 0;
6651 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6653 offset = 0;
6654 base_regno = REGNO (src_addr);
6655 if (base_regno == 0)
6656 return 0;
6658 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6660 offset = INTVAL (XEXP (src_addr, 1));
6661 base_regno = REGNO (XEXP (src_addr, 0));
6663 else
6664 return 0;
6666 for (i = 0; i < count; i++)
6668 rtx elt = XVECEXP (op, 0, i);
6669 rtx newaddr;
6670 rtx addr_reg;
6671 HOST_WIDE_INT newoffset;
6673 if (GET_CODE (elt) != SET
6674 || GET_CODE (SET_DEST (elt)) != REG
6675 || GET_MODE (SET_DEST (elt)) != SImode
6676 || REGNO (SET_DEST (elt)) != dest_regno + i
6677 || GET_CODE (SET_SRC (elt)) != MEM
6678 || GET_MODE (SET_SRC (elt)) != SImode)
6679 return 0;
6680 newaddr = XEXP (SET_SRC (elt), 0);
6681 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6683 newoffset = 0;
6684 addr_reg = newaddr;
6686 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6688 addr_reg = XEXP (newaddr, 0);
6689 newoffset = INTVAL (XEXP (newaddr, 1));
6691 else
6692 return 0;
6693 if (REGNO (addr_reg) != base_regno
6694 || newoffset != offset + 4 * i)
6695 return 0;
6698 return 1;
6701 /* Return 1 for an PARALLEL suitable for stmw. */
6704 stmw_operation (op, mode)
6705 rtx op;
6706 enum machine_mode mode ATTRIBUTE_UNUSED;
6708 int count = XVECLEN (op, 0);
6709 unsigned int src_regno;
6710 rtx dest_addr;
6711 unsigned int base_regno;
6712 HOST_WIDE_INT offset;
6713 int i;
6715 /* Perform a quick check so we don't blow up below. */
6716 if (count <= 1
6717 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6718 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6719 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6720 return 0;
6722 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6723 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6725 if (src_regno > 31
6726 || count != 32 - (int) src_regno)
6727 return 0;
6729 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6731 offset = 0;
6732 base_regno = REGNO (dest_addr);
6733 if (base_regno == 0)
6734 return 0;
6736 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6738 offset = INTVAL (XEXP (dest_addr, 1));
6739 base_regno = REGNO (XEXP (dest_addr, 0));
6741 else
6742 return 0;
6744 for (i = 0; i < count; i++)
6746 rtx elt = XVECEXP (op, 0, i);
6747 rtx newaddr;
6748 rtx addr_reg;
6749 HOST_WIDE_INT newoffset;
6751 if (GET_CODE (elt) != SET
6752 || GET_CODE (SET_SRC (elt)) != REG
6753 || GET_MODE (SET_SRC (elt)) != SImode
6754 || REGNO (SET_SRC (elt)) != src_regno + i
6755 || GET_CODE (SET_DEST (elt)) != MEM
6756 || GET_MODE (SET_DEST (elt)) != SImode)
6757 return 0;
6758 newaddr = XEXP (SET_DEST (elt), 0);
6759 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6761 newoffset = 0;
6762 addr_reg = newaddr;
6764 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6766 addr_reg = XEXP (newaddr, 0);
6767 newoffset = INTVAL (XEXP (newaddr, 1));
6769 else
6770 return 0;
6771 if (REGNO (addr_reg) != base_regno
6772 || newoffset != offset + 4 * i)
6773 return 0;
6776 return 1;
6779 /* A validation routine: say whether CODE, a condition code, and MODE
6780 match. The other alternatives either don't make sense or should
6781 never be generated. */
6783 static void
6784 validate_condition_mode (code, mode)
6785 enum rtx_code code;
6786 enum machine_mode mode;
6788 if (GET_RTX_CLASS (code) != '<'
6789 || GET_MODE_CLASS (mode) != MODE_CC)
6790 abort ();
6792 /* These don't make sense. */
6793 if ((code == GT || code == LT || code == GE || code == LE)
6794 && mode == CCUNSmode)
6795 abort ();
6797 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6798 && mode != CCUNSmode)
6799 abort ();
6801 if (mode != CCFPmode
6802 && (code == ORDERED || code == UNORDERED
6803 || code == UNEQ || code == LTGT
6804 || code == UNGT || code == UNLT
6805 || code == UNGE || code == UNLE))
6806 abort ();
6808 /* These should never be generated except for
6809 flag_unsafe_math_optimizations and flag_finite_math_only. */
6810 if (mode == CCFPmode
6811 && ! flag_unsafe_math_optimizations
6812 && ! flag_finite_math_only
6813 && (code == LE || code == GE
6814 || code == UNEQ || code == LTGT
6815 || code == UNGT || code == UNLT))
6816 abort ();
6818 /* These are invalid; the information is not there. */
6819 if (mode == CCEQmode
6820 && code != EQ && code != NE)
6821 abort ();
6824 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6825 We only check the opcode against the mode of the CC value here. */
6828 branch_comparison_operator (op, mode)
6829 rtx op;
6830 enum machine_mode mode ATTRIBUTE_UNUSED;
6832 enum rtx_code code = GET_CODE (op);
6833 enum machine_mode cc_mode;
6835 if (GET_RTX_CLASS (code) != '<')
6836 return 0;
6838 cc_mode = GET_MODE (XEXP (op, 0));
6839 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6840 return 0;
6842 validate_condition_mode (code, cc_mode);
6844 return 1;
6847 /* Return 1 if OP is a comparison operation that is valid for a branch
6848 insn and which is true if the corresponding bit in the CC register
6849 is set. */
6852 branch_positive_comparison_operator (op, mode)
6853 rtx op;
6854 enum machine_mode mode;
6856 enum rtx_code code;
6858 if (! branch_comparison_operator (op, mode))
6859 return 0;
6861 code = GET_CODE (op);
6862 return (code == EQ || code == LT || code == GT
6863 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6864 || code == LTU || code == GTU
6865 || code == UNORDERED);
6868 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6869 We check the opcode against the mode of the CC value and disallow EQ or
6870 NE comparisons for integers. */
6873 scc_comparison_operator (op, mode)
6874 rtx op;
6875 enum machine_mode mode;
6877 enum rtx_code code = GET_CODE (op);
6878 enum machine_mode cc_mode;
6880 if (GET_MODE (op) != mode && mode != VOIDmode)
6881 return 0;
6883 if (GET_RTX_CLASS (code) != '<')
6884 return 0;
6886 cc_mode = GET_MODE (XEXP (op, 0));
6887 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6888 return 0;
6890 validate_condition_mode (code, cc_mode);
6892 if (code == NE && cc_mode != CCFPmode)
6893 return 0;
6895 return 1;
6899 trap_comparison_operator (op, mode)
6900 rtx op;
6901 enum machine_mode mode;
6903 if (mode != VOIDmode && mode != GET_MODE (op))
6904 return 0;
6905 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6909 boolean_operator (op, mode)
6910 rtx op;
6911 enum machine_mode mode ATTRIBUTE_UNUSED;
6913 enum rtx_code code = GET_CODE (op);
6914 return (code == AND || code == IOR || code == XOR);
6918 boolean_or_operator (op, mode)
6919 rtx op;
6920 enum machine_mode mode ATTRIBUTE_UNUSED;
6922 enum rtx_code code = GET_CODE (op);
6923 return (code == IOR || code == XOR);
6927 min_max_operator (op, mode)
6928 rtx op;
6929 enum machine_mode mode ATTRIBUTE_UNUSED;
6931 enum rtx_code code = GET_CODE (op);
6932 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6935 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6936 mask required to convert the result of a rotate insn into a shift
6937 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6940 includes_lshift_p (shiftop, andop)
6941 rtx shiftop;
6942 rtx andop;
6944 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6946 shift_mask <<= INTVAL (shiftop);
6948 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6951 /* Similar, but for right shift. */
6954 includes_rshift_p (shiftop, andop)
6955 rtx shiftop;
6956 rtx andop;
6958 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6960 shift_mask >>= INTVAL (shiftop);
6962 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6965 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6966 to perform a left shift. It must have exactly SHIFTOP least
6967 significant 0's, then one or more 1's, then zero or more 0's. */
6970 includes_rldic_lshift_p (shiftop, andop)
6971 rtx shiftop;
6972 rtx andop;
6974 if (GET_CODE (andop) == CONST_INT)
6976 HOST_WIDE_INT c, lsb, shift_mask;
6978 c = INTVAL (andop);
6979 if (c == 0 || c == ~0)
6980 return 0;
6982 shift_mask = ~0;
6983 shift_mask <<= INTVAL (shiftop);
6985 /* Find the least significant one bit. */
6986 lsb = c & -c;
6988 /* It must coincide with the LSB of the shift mask. */
6989 if (-lsb != shift_mask)
6990 return 0;
6992 /* Invert to look for the next transition (if any). */
6993 c = ~c;
6995 /* Remove the low group of ones (originally low group of zeros). */
6996 c &= -lsb;
6998 /* Again find the lsb, and check we have all 1's above. */
6999 lsb = c & -c;
7000 return c == -lsb;
7002 else if (GET_CODE (andop) == CONST_DOUBLE
7003 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7005 HOST_WIDE_INT low, high, lsb;
7006 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7008 low = CONST_DOUBLE_LOW (andop);
7009 if (HOST_BITS_PER_WIDE_INT < 64)
7010 high = CONST_DOUBLE_HIGH (andop);
7012 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7013 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7014 return 0;
7016 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7018 shift_mask_high = ~0;
7019 if (INTVAL (shiftop) > 32)
7020 shift_mask_high <<= INTVAL (shiftop) - 32;
7022 lsb = high & -high;
7024 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7025 return 0;
7027 high = ~high;
7028 high &= -lsb;
7030 lsb = high & -high;
7031 return high == -lsb;
7034 shift_mask_low = ~0;
7035 shift_mask_low <<= INTVAL (shiftop);
7037 lsb = low & -low;
7039 if (-lsb != shift_mask_low)
7040 return 0;
7042 if (HOST_BITS_PER_WIDE_INT < 64)
7043 high = ~high;
7044 low = ~low;
7045 low &= -lsb;
7047 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7049 lsb = high & -high;
7050 return high == -lsb;
7053 lsb = low & -low;
7054 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7056 else
7057 return 0;
7060 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7061 to perform a left shift. It must have SHIFTOP or more least
7062 signifigant 0's, with the remainder of the word 1's. */
7065 includes_rldicr_lshift_p (shiftop, andop)
7066 rtx shiftop;
7067 rtx andop;
7069 if (GET_CODE (andop) == CONST_INT)
7071 HOST_WIDE_INT c, lsb, shift_mask;
7073 shift_mask = ~0;
7074 shift_mask <<= INTVAL (shiftop);
7075 c = INTVAL (andop);
7077 /* Find the least signifigant one bit. */
7078 lsb = c & -c;
7080 /* It must be covered by the shift mask.
7081 This test also rejects c == 0. */
7082 if ((lsb & shift_mask) == 0)
7083 return 0;
7085 /* Check we have all 1's above the transition, and reject all 1's. */
7086 return c == -lsb && lsb != 1;
7088 else if (GET_CODE (andop) == CONST_DOUBLE
7089 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7091 HOST_WIDE_INT low, lsb, shift_mask_low;
7093 low = CONST_DOUBLE_LOW (andop);
7095 if (HOST_BITS_PER_WIDE_INT < 64)
7097 HOST_WIDE_INT high, shift_mask_high;
7099 high = CONST_DOUBLE_HIGH (andop);
7101 if (low == 0)
7103 shift_mask_high = ~0;
7104 if (INTVAL (shiftop) > 32)
7105 shift_mask_high <<= INTVAL (shiftop) - 32;
7107 lsb = high & -high;
7109 if ((lsb & shift_mask_high) == 0)
7110 return 0;
7112 return high == -lsb;
7114 if (high != ~0)
7115 return 0;
7118 shift_mask_low = ~0;
7119 shift_mask_low <<= INTVAL (shiftop);
7121 lsb = low & -low;
7123 if ((lsb & shift_mask_low) == 0)
7124 return 0;
7126 return low == -lsb && lsb != 1;
7128 else
7129 return 0;
7132 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7133 for lfq and stfq insns.
7135 Note reg1 and reg2 *must* be hard registers. To be sure we will
7136 abort if we are passed pseudo registers. */
7139 registers_ok_for_quad_peep (reg1, reg2)
7140 rtx reg1, reg2;
7142 /* We might have been passed a SUBREG. */
7143 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7144 return 0;
7146 return (REGNO (reg1) == REGNO (reg2) - 1);
7149 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7150 addr1 and addr2 must be in consecutive memory locations
7151 (addr2 == addr1 + 8). */
7154 addrs_ok_for_quad_peep (addr1, addr2)
7155 rtx addr1;
7156 rtx addr2;
7158 unsigned int reg1;
7159 int offset1;
7161 /* Extract an offset (if used) from the first addr. */
7162 if (GET_CODE (addr1) == PLUS)
7164 /* If not a REG, return zero. */
7165 if (GET_CODE (XEXP (addr1, 0)) != REG)
7166 return 0;
7167 else
7169 reg1 = REGNO (XEXP (addr1, 0));
7170 /* The offset must be constant! */
7171 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7172 return 0;
7173 offset1 = INTVAL (XEXP (addr1, 1));
7176 else if (GET_CODE (addr1) != REG)
7177 return 0;
7178 else
7180 reg1 = REGNO (addr1);
7181 /* This was a simple (mem (reg)) expression. Offset is 0. */
7182 offset1 = 0;
7185 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7186 if (GET_CODE (addr2) != PLUS)
7187 return 0;
7189 if (GET_CODE (XEXP (addr2, 0)) != REG
7190 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7191 return 0;
7193 if (reg1 != REGNO (XEXP (addr2, 0)))
7194 return 0;
7196 /* The offset for the second addr must be 8 more than the first addr. */
7197 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7198 return 0;
7200 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7201 instructions. */
7202 return 1;
7205 /* Return the register class of a scratch register needed to copy IN into
7206 or out of a register in CLASS in MODE. If it can be done directly,
7207 NO_REGS is returned. */
7209 enum reg_class
7210 secondary_reload_class (class, mode, in)
7211 enum reg_class class;
7212 enum machine_mode mode ATTRIBUTE_UNUSED;
7213 rtx in;
7215 int regno;
7217 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
7219 /* We cannot copy a symbolic operand directly into anything
7220 other than BASE_REGS for TARGET_ELF. So indicate that a
7221 register from BASE_REGS is needed as an intermediate
7222 register.
7224 On Darwin, pic addresses require a load from memory, which
7225 needs a base register. */
7226 if (class != BASE_REGS
7227 && (GET_CODE (in) == SYMBOL_REF
7228 || GET_CODE (in) == HIGH
7229 || GET_CODE (in) == LABEL_REF
7230 || GET_CODE (in) == CONST))
7231 return BASE_REGS;
7234 if (GET_CODE (in) == REG)
7236 regno = REGNO (in);
7237 if (regno >= FIRST_PSEUDO_REGISTER)
7239 regno = true_regnum (in);
7240 if (regno >= FIRST_PSEUDO_REGISTER)
7241 regno = -1;
7244 else if (GET_CODE (in) == SUBREG)
7246 regno = true_regnum (in);
7247 if (regno >= FIRST_PSEUDO_REGISTER)
7248 regno = -1;
7250 else
7251 regno = -1;
7253 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7254 into anything. */
7255 if (class == GENERAL_REGS || class == BASE_REGS
7256 || (regno >= 0 && INT_REGNO_P (regno)))
7257 return NO_REGS;
7259 /* Constants, memory, and FP registers can go into FP registers. */
7260 if ((regno == -1 || FP_REGNO_P (regno))
7261 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7262 return NO_REGS;
7264 /* Memory, and AltiVec registers can go into AltiVec registers. */
7265 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7266 && class == ALTIVEC_REGS)
7267 return NO_REGS;
7269 /* We can copy among the CR registers. */
7270 if ((class == CR_REGS || class == CR0_REGS)
7271 && regno >= 0 && CR_REGNO_P (regno))
7272 return NO_REGS;
7274 /* Otherwise, we need GENERAL_REGS. */
7275 return GENERAL_REGS;
7278 /* Given a comparison operation, return the bit number in CCR to test. We
7279 know this is a valid comparison.
7281 SCC_P is 1 if this is for an scc. That means that %D will have been
7282 used instead of %C, so the bits will be in different places.
7284 Return -1 if OP isn't a valid comparison for some reason. */
7287 ccr_bit (op, scc_p)
7288 rtx op;
7289 int scc_p;
7291 enum rtx_code code = GET_CODE (op);
7292 enum machine_mode cc_mode;
7293 int cc_regnum;
7294 int base_bit;
7295 rtx reg;
7297 if (GET_RTX_CLASS (code) != '<')
7298 return -1;
7300 reg = XEXP (op, 0);
7302 if (GET_CODE (reg) != REG
7303 || ! CR_REGNO_P (REGNO (reg)))
7304 abort ();
7306 cc_mode = GET_MODE (reg);
7307 cc_regnum = REGNO (reg);
7308 base_bit = 4 * (cc_regnum - CR0_REGNO);
7310 validate_condition_mode (code, cc_mode);
7312 switch (code)
7314 case NE:
7315 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7316 return base_bit + 1;
7317 return scc_p ? base_bit + 3 : base_bit + 2;
7318 case EQ:
7319 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7320 return base_bit + 1;
7321 return base_bit + 2;
7322 case GT: case GTU: case UNLE:
7323 return base_bit + 1;
7324 case LT: case LTU: case UNGE:
7325 return base_bit;
7326 case ORDERED: case UNORDERED:
7327 return base_bit + 3;
7329 case GE: case GEU:
7330 /* If scc, we will have done a cror to put the bit in the
7331 unordered position. So test that bit. For integer, this is ! LT
7332 unless this is an scc insn. */
7333 return scc_p ? base_bit + 3 : base_bit;
7335 case LE: case LEU:
7336 return scc_p ? base_bit + 3 : base_bit + 1;
7338 default:
7339 abort ();
7343 /* Return the GOT register. */
7345 struct rtx_def *
7346 rs6000_got_register (value)
7347 rtx value ATTRIBUTE_UNUSED;
7349 /* The second flow pass currently (June 1999) can't update
7350 regs_ever_live without disturbing other parts of the compiler, so
7351 update it here to make the prolog/epilogue code happy. */
7352 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7353 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7355 current_function_uses_pic_offset_table = 1;
7357 return pic_offset_table_rtx;
7360 /* Function to init struct machine_function.
7361 This will be called, via a pointer variable,
7362 from push_function_context. */
7364 static struct machine_function *
7365 rs6000_init_machine_status ()
7367 return ggc_alloc_cleared (sizeof (machine_function));
/* These macros test for integers and extract the low-order bits.
   INT_P accepts a mode-less CONST_INT or CONST_DOUBLE; INT_LOWPART
   yields the low host-word of either.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
&& GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7379 extract_MB (op)
7380 rtx op;
7382 int i;
7383 unsigned long val = INT_LOWPART (op);
7385 /* If the high bit is zero, the value is the first 1 bit we find
7386 from the left. */
7387 if ((val & 0x80000000) == 0)
7389 if ((val & 0xffffffff) == 0)
7390 abort ();
7392 i = 1;
7393 while (((val <<= 1) & 0x80000000) == 0)
7394 ++i;
7395 return i;
7398 /* If the high bit is set and the low bit is not, or the mask is all
7399 1's, the value is zero. */
7400 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7401 return 0;
7403 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7404 from the right. */
7405 i = 31;
7406 while (((val >>= 1) & 1) != 0)
7407 --i;
7409 return i;
7413 extract_ME (op)
7414 rtx op;
7416 int i;
7417 unsigned long val = INT_LOWPART (op);
7419 /* If the low bit is zero, the value is the first 1 bit we find from
7420 the right. */
7421 if ((val & 1) == 0)
7423 if ((val & 0xffffffff) == 0)
7424 abort ();
7426 i = 30;
7427 while (((val >>= 1) & 1) == 0)
7428 --i;
7430 return i;
7433 /* If the low bit is set and the high bit is not, or the mask is all
7434 1's, the value is 31. */
7435 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7436 return 31;
7438 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7439 from the left. */
7440 i = 0;
7441 while (((val <<= 1) & 0x80000000) != 0)
7442 ++i;
7444 return i;
7447 /* Print an operand. Recognize special options, documented below. */
/* Relocation name and base register used for small-data references.
   On ELF, EABI uses @sda21 against r0 while other sdata models use
   @sdarel against r13; elsewhere the EABI form is the only choice.  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
7457 void
7458 print_operand (file, x, code)
7459 FILE *file;
7460 rtx x;
7461 int code;
7463 int i;
7464 HOST_WIDE_INT val;
7465 unsigned HOST_WIDE_INT uval;
7467 switch (code)
7469 case '.':
7470 /* Write out an instruction after the call which may be replaced
7471 with glue code by the loader. This depends on the AIX version. */
7472 asm_fprintf (file, RS6000_CALL_GLUE);
7473 return;
7475 /* %a is output_address. */
7477 case 'A':
7478 /* If X is a constant integer whose low-order 5 bits are zero,
7479 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7480 in the AIX assembler where "sri" with a zero shift count
7481 writes a trash instruction. */
7482 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7483 putc ('l', file);
7484 else
7485 putc ('r', file);
7486 return;
7488 case 'b':
7489 /* If constant, low-order 16 bits of constant, unsigned.
7490 Otherwise, write normally. */
7491 if (INT_P (x))
7492 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7493 else
7494 print_operand (file, x, 0);
7495 return;
7497 case 'B':
7498 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7499 for 64-bit mask direction. */
7500 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7501 return;
7503 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7504 output_operand. */
7506 case 'D':
7507 /* There used to be a comment for 'C' reading "This is an
7508 optional cror needed for certain floating-point
7509 comparisons. Otherwise write nothing." */
7511 /* Similar, except that this is for an scc, so we must be able to
7512 encode the test in a single bit that is one. We do the above
7513 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7514 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7515 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7517 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7519 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7520 base_bit + 2,
7521 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7524 else if (GET_CODE (x) == NE)
7526 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7528 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7529 base_bit + 2, base_bit + 2);
7531 else if (TARGET_SPE && TARGET_HARD_FLOAT
7532 && GET_CODE (x) == EQ
7533 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7535 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7537 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7538 base_bit + 1, base_bit + 1);
7540 return;
7542 case 'E':
7543 /* X is a CR register. Print the number of the EQ bit of the CR */
7544 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7545 output_operand_lossage ("invalid %%E value");
7546 else
7547 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7548 return;
7550 case 'f':
7551 /* X is a CR register. Print the shift count needed to move it
7552 to the high-order four bits. */
7553 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7554 output_operand_lossage ("invalid %%f value");
7555 else
7556 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7557 return;
7559 case 'F':
7560 /* Similar, but print the count for the rotate in the opposite
7561 direction. */
7562 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7563 output_operand_lossage ("invalid %%F value");
7564 else
7565 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7566 return;
7568 case 'G':
7569 /* X is a constant integer. If it is negative, print "m",
7570 otherwise print "z". This is to make an aze or ame insn. */
7571 if (GET_CODE (x) != CONST_INT)
7572 output_operand_lossage ("invalid %%G value");
7573 else if (INTVAL (x) >= 0)
7574 putc ('z', file);
7575 else
7576 putc ('m', file);
7577 return;
7579 case 'h':
7580 /* If constant, output low-order five bits. Otherwise, write
7581 normally. */
7582 if (INT_P (x))
7583 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7584 else
7585 print_operand (file, x, 0);
7586 return;
7588 case 'H':
7589 /* If constant, output low-order six bits. Otherwise, write
7590 normally. */
7591 if (INT_P (x))
7592 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7593 else
7594 print_operand (file, x, 0);
7595 return;
7597 case 'I':
7598 /* Print `i' if this is a constant, else nothing. */
7599 if (INT_P (x))
7600 putc ('i', file);
7601 return;
7603 case 'j':
7604 /* Write the bit number in CCR for jump. */
7605 i = ccr_bit (x, 0);
7606 if (i == -1)
7607 output_operand_lossage ("invalid %%j code");
7608 else
7609 fprintf (file, "%d", i);
7610 return;
7612 case 'J':
7613 /* Similar, but add one for shift count in rlinm for scc and pass
7614 scc flag to `ccr_bit'. */
7615 i = ccr_bit (x, 1);
7616 if (i == -1)
7617 output_operand_lossage ("invalid %%J code");
7618 else
7619 /* If we want bit 31, write a shift count of zero, not 32. */
7620 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7621 return;
7623 case 'k':
7624 /* X must be a constant. Write the 1's complement of the
7625 constant. */
7626 if (! INT_P (x))
7627 output_operand_lossage ("invalid %%k value");
7628 else
7629 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7630 return;
7632 case 'K':
7633 /* X must be a symbolic constant on ELF. Write an
7634 expression suitable for an 'addi' that adds in the low 16
7635 bits of the MEM. */
7636 if (GET_CODE (x) != CONST)
7638 print_operand_address (file, x);
7639 fputs ("@l", file);
7641 else
7643 if (GET_CODE (XEXP (x, 0)) != PLUS
7644 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7645 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7646 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7647 output_operand_lossage ("invalid %%K value");
7648 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7649 fputs ("@l", file);
7650 /* For GNU as, there must be a non-alphanumeric character
7651 between 'l' and the number. The '-' is added by
7652 print_operand() already. */
7653 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7654 fputs ("+", file);
7655 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7657 return;
7659 /* %l is output_asm_label. */
7661 case 'L':
7662 /* Write second word of DImode or DFmode reference. Works on register
7663 or non-indexed memory only. */
7664 if (GET_CODE (x) == REG)
7665 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7666 else if (GET_CODE (x) == MEM)
7668 /* Handle possible auto-increment. Since it is pre-increment and
7669 we have already done it, we can just use an offset of word. */
7670 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7671 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7672 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7673 UNITS_PER_WORD));
7674 else
7675 output_address (XEXP (adjust_address_nv (x, SImode,
7676 UNITS_PER_WORD),
7677 0));
7679 if (small_data_operand (x, GET_MODE (x)))
7680 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7681 reg_names[SMALL_DATA_REG]);
7683 return;
7685 case 'm':
7686 /* MB value for a mask operand. */
7687 if (! mask_operand (x, SImode))
7688 output_operand_lossage ("invalid %%m value");
7690 fprintf (file, "%d", extract_MB (x));
7691 return;
7693 case 'M':
7694 /* ME value for a mask operand. */
7695 if (! mask_operand (x, SImode))
7696 output_operand_lossage ("invalid %%M value");
7698 fprintf (file, "%d", extract_ME (x));
7699 return;
7701 /* %n outputs the negative of its operand. */
7703 case 'N':
7704 /* Write the number of elements in the vector times 4. */
7705 if (GET_CODE (x) != PARALLEL)
7706 output_operand_lossage ("invalid %%N value");
7707 else
7708 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7709 return;
7711 case 'O':
7712 /* Similar, but subtract 1 first. */
7713 if (GET_CODE (x) != PARALLEL)
7714 output_operand_lossage ("invalid %%O value");
7715 else
7716 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7717 return;
7719 case 'p':
7720 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7721 if (! INT_P (x)
7722 || INT_LOWPART (x) < 0
7723 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7724 output_operand_lossage ("invalid %%p value");
7725 else
7726 fprintf (file, "%d", i);
7727 return;
7729 case 'P':
7730 /* The operand must be an indirect memory reference. The result
7731 is the register number. */
7732 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7733 || REGNO (XEXP (x, 0)) >= 32)
7734 output_operand_lossage ("invalid %%P value");
7735 else
7736 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7737 return;
7739 case 'q':
7740 /* This outputs the logical code corresponding to a boolean
7741 expression. The expression may have one or both operands
7742 negated (if one, only the first one). For condition register
7743 logical operations, it will also treat the negated
7744 CR codes as NOTs, but not handle NOTs of them. */
7746 const char *const *t = 0;
7747 const char *s;
7748 enum rtx_code code = GET_CODE (x);
7749 static const char * const tbl[3][3] = {
7750 { "and", "andc", "nor" },
7751 { "or", "orc", "nand" },
7752 { "xor", "eqv", "xor" } };
7754 if (code == AND)
7755 t = tbl[0];
7756 else if (code == IOR)
7757 t = tbl[1];
7758 else if (code == XOR)
7759 t = tbl[2];
7760 else
7761 output_operand_lossage ("invalid %%q value");
7763 if (GET_CODE (XEXP (x, 0)) != NOT)
7764 s = t[0];
7765 else
7767 if (GET_CODE (XEXP (x, 1)) == NOT)
7768 s = t[2];
7769 else
7770 s = t[1];
7773 fputs (s, file);
7775 return;
7777 case 'R':
7778 /* X is a CR register. Print the mask for `mtcrf'. */
7779 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7780 output_operand_lossage ("invalid %%R value");
7781 else
7782 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7783 return;
7785 case 's':
7786 /* Low 5 bits of 32 - value */
7787 if (! INT_P (x))
7788 output_operand_lossage ("invalid %%s value");
7789 else
7790 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7791 return;
7793 case 'S':
7794 /* PowerPC64 mask position. All 0's is excluded.
7795 CONST_INT 32-bit mask is considered sign-extended so any
7796 transition must occur within the CONST_INT, not on the boundary. */
7797 if (! mask64_operand (x, DImode))
7798 output_operand_lossage ("invalid %%S value");
7800 uval = INT_LOWPART (x);
7802 if (uval & 1) /* Clear Left */
7804 #if HOST_BITS_PER_WIDE_INT > 64
7805 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
7806 #endif
7807 i = 64;
7809 else /* Clear Right */
7811 uval = ~uval;
7812 #if HOST_BITS_PER_WIDE_INT > 64
7813 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
7814 #endif
7815 i = 63;
7817 while (uval != 0)
7818 --i, uval >>= 1;
7819 if (i < 0)
7820 abort ();
7821 fprintf (file, "%d", i);
7822 return;
7824 case 't':
7825 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7826 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7827 abort ();
7829 /* Bit 3 is OV bit. */
7830 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7832 /* If we want bit 31, write a shift count of zero, not 32. */
7833 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7834 return;
7836 case 'T':
7837 /* Print the symbolic name of a branch target register. */
7838 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7839 && REGNO (x) != COUNT_REGISTER_REGNUM))
7840 output_operand_lossage ("invalid %%T value");
7841 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7842 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7843 else
7844 fputs ("ctr", file);
7845 return;
7847 case 'u':
7848 /* High-order 16 bits of constant for use in unsigned operand. */
7849 if (! INT_P (x))
7850 output_operand_lossage ("invalid %%u value");
7851 else
7852 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7853 (INT_LOWPART (x) >> 16) & 0xffff);
7854 return;
7856 case 'v':
7857 /* High-order 16 bits of constant for use in signed operand. */
7858 if (! INT_P (x))
7859 output_operand_lossage ("invalid %%v value");
7860 else
7861 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7862 (INT_LOWPART (x) >> 16) & 0xffff);
7863 return;
7865 case 'U':
7866 /* Print `u' if this has an auto-increment or auto-decrement. */
7867 if (GET_CODE (x) == MEM
7868 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7869 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7870 putc ('u', file);
7871 return;
7873 case 'V':
7874 /* Print the trap code for this operand. */
7875 switch (GET_CODE (x))
7877 case EQ:
7878 fputs ("eq", file); /* 4 */
7879 break;
7880 case NE:
7881 fputs ("ne", file); /* 24 */
7882 break;
7883 case LT:
7884 fputs ("lt", file); /* 16 */
7885 break;
7886 case LE:
7887 fputs ("le", file); /* 20 */
7888 break;
7889 case GT:
7890 fputs ("gt", file); /* 8 */
7891 break;
7892 case GE:
7893 fputs ("ge", file); /* 12 */
7894 break;
7895 case LTU:
7896 fputs ("llt", file); /* 2 */
7897 break;
7898 case LEU:
7899 fputs ("lle", file); /* 6 */
7900 break;
7901 case GTU:
7902 fputs ("lgt", file); /* 1 */
7903 break;
7904 case GEU:
7905 fputs ("lge", file); /* 5 */
7906 break;
7907 default:
7908 abort ();
7910 break;
7912 case 'w':
7913 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7914 normally. */
7915 if (INT_P (x))
7916 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7917 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7918 else
7919 print_operand (file, x, 0);
7920 return;
7922 case 'W':
7923 /* MB value for a PowerPC64 rldic operand. */
7924 val = (GET_CODE (x) == CONST_INT
7925 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7927 if (val < 0)
7928 i = -1;
7929 else
7930 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7931 if ((val <<= 1) < 0)
7932 break;
7934 #if HOST_BITS_PER_WIDE_INT == 32
7935 if (GET_CODE (x) == CONST_INT && i >= 0)
7936 i += 32; /* zero-extend high-part was all 0's */
7937 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7939 val = CONST_DOUBLE_LOW (x);
7941 if (val == 0)
7942 abort ();
7943 else if (val < 0)
7944 --i;
7945 else
7946 for ( ; i < 64; i++)
7947 if ((val <<= 1) < 0)
7948 break;
7950 #endif
7952 fprintf (file, "%d", i + 1);
7953 return;
7955 case 'X':
7956 if (GET_CODE (x) == MEM
7957 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7958 putc ('x', file);
7959 return;
7961 case 'Y':
7962 /* Like 'L', for third word of TImode */
7963 if (GET_CODE (x) == REG)
7964 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7965 else if (GET_CODE (x) == MEM)
7967 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7968 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7969 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7970 else
7971 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7972 if (small_data_operand (x, GET_MODE (x)))
7973 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7974 reg_names[SMALL_DATA_REG]);
7976 return;
7978 case 'z':
7979 /* X is a SYMBOL_REF. Write out the name preceded by a
7980 period and without any trailing data in brackets. Used for function
7981 names. If we are configured for System V (or the embedded ABI) on
7982 the PowerPC, do not emit the period, since those systems do not use
7983 TOCs and the like. */
7984 if (GET_CODE (x) != SYMBOL_REF)
7985 abort ();
7987 if (XSTR (x, 0)[0] != '.')
7989 switch (DEFAULT_ABI)
7991 default:
7992 abort ();
7994 case ABI_AIX:
7995 putc ('.', file);
7996 break;
7998 case ABI_V4:
7999 case ABI_AIX_NODESC:
8000 case ABI_DARWIN:
8001 break;
8004 #if TARGET_AIX
8005 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8006 #else
8007 assemble_name (file, XSTR (x, 0));
8008 #endif
8009 return;
8011 case 'Z':
8012 /* Like 'L', for last word of TImode. */
8013 if (GET_CODE (x) == REG)
8014 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8015 else if (GET_CODE (x) == MEM)
8017 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8018 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8019 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8020 else
8021 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8022 if (small_data_operand (x, GET_MODE (x)))
8023 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8024 reg_names[SMALL_DATA_REG]);
8026 return;
8028 /* Print AltiVec or SPE memory operand. */
8029 case 'y':
8031 rtx tmp;
8033 if (GET_CODE (x) != MEM)
8034 abort ();
8036 tmp = XEXP (x, 0);
8038 if (TARGET_SPE)
8040 /* Handle [reg]. */
8041 if (GET_CODE (tmp) == REG)
8043 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8044 break;
8046 /* Handle [reg+UIMM]. */
8047 else if (GET_CODE (tmp) == PLUS &&
8048 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8050 int x;
8052 if (GET_CODE (XEXP (tmp, 0)) != REG)
8053 abort ();
8055 x = INTVAL (XEXP (tmp, 1));
8056 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8057 break;
8060 /* Fall through. Must be [reg+reg]. */
8062 if (GET_CODE (tmp) == REG)
8063 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8064 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8066 if (REGNO (XEXP (tmp, 0)) == 0)
8067 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8068 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8069 else
8070 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8071 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8073 else
8074 abort ();
8075 break;
8078 case 0:
8079 if (GET_CODE (x) == REG)
8080 fprintf (file, "%s", reg_names[REGNO (x)]);
8081 else if (GET_CODE (x) == MEM)
8083 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8084 know the width from the mode. */
8085 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8086 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8087 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8088 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8089 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8090 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8091 else
8092 output_address (XEXP (x, 0));
8094 else
8095 output_addr_const (file, x);
8096 return;
8098 default:
8099 output_operand_lossage ("invalid %%xn code");
8103 /* Print the address of an operand. */
8105 void
8106 print_operand_address (file, x)
8107 FILE *file;
8108 rtx x;
/* Print to FILE the assembler syntax for the address expression X.
   Handles: bare register, symbolic constants (with small-data and
   TOC checks), reg+reg, reg+const, LO_SUM (ELF "@l" / Mach-O "lo16"),
   and TOC constant-pool addresses.  Aborts on anything else.  */
8110 if (GET_CODE (x) == REG)
/* Plain register: a zero displacement is printed explicitly.  */
8111 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8112 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8113 || GET_CODE (x) == LABEL_REF)
8115 output_addr_const (file, x);
8116 if (small_data_operand (x, GET_MODE (x)))
/* Small-data references go through the small-data base register.  */
8117 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8118 reg_names[SMALL_DATA_REG]);
8119 else if (TARGET_TOC)
/* A bare symbolic address should never reach here on a TOC target.  */
8120 abort ();
8122 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
/* reg+reg: if the first register is r0 print the operands swapped,
   presumably because r0 reads as zero in the base-register slot of
   an indexed form — TODO confirm against the md patterns.  */
8124 if (REGNO (XEXP (x, 0)) == 0)
8125 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8126 reg_names[ REGNO (XEXP (x, 0)) ]);
8127 else
8128 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8129 reg_names[ REGNO (XEXP (x, 1)) ]);
8131 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* reg+const: "d(rN)" displacement form.  */
8133 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
8134 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8136 #if TARGET_ELF
/* ELF low-part relocation: "sym@l(rN)".  */
8137 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8138 && CONSTANT_P (XEXP (x, 1)))
8140 output_addr_const (file, XEXP (x, 1));
8141 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8143 #endif
8144 #if TARGET_MACHO
/* Mach-O spelling of the same low-part relocation: "lo16(sym)(rN)".  */
8145 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8146 && CONSTANT_P (XEXP (x, 1)))
8148 fprintf (file, "lo16(");
8149 output_addr_const (file, XEXP (x, 1));
8150 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8152 #endif
8153 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
8155 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8157 rtx contains_minus = XEXP (x, 1);
8158 rtx minus, symref;
8159 const char *name;
8161 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8162 turn it into (sym) for output_addr_const. */
8163 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8164 contains_minus = XEXP (contains_minus, 0);
8166 minus = XEXP (contains_minus, 0);
8167 symref = XEXP (minus, 0);
/* Splice the bare SYMBOL_REF into the tree in place of the MINUS;
   undone below once output_addr_const has printed it.  */
8168 XEXP (contains_minus, 0) = symref;
8169 if (TARGET_ELF)
/* For ELF, print "sym@toc" by temporarily renaming the symbol;
   alloca keeps the temporary name alive only for this call.  */
8171 char *newname;
8173 name = XSTR (symref, 0);
8174 newname = alloca (strlen (name) + sizeof ("@toc"));
8175 strcpy (newname, name);
8176 strcat (newname, "@toc");
8177 XSTR (symref, 0) = newname;
8179 output_addr_const (file, XEXP (x, 1));
8180 if (TARGET_ELF)
/* Restore the original symbol name and tree shape.  */
8181 XSTR (symref, 0) = name;
8182 XEXP (contains_minus, 0) = minus;
8184 else
8185 output_addr_const (file, XEXP (x, 1));
8187 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8189 else
8190 abort ();
8193 /* Target hook for assembling integer objects. The PowerPC version has
8194 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8195 is defined. It also needs to handle DI-mode objects on 64-bit
8196 targets. */
8198 static bool
8199 rs6000_assemble_integer (x, size, aligned_p)
8200 rtx x;
8201 unsigned int size;
8202 int aligned_p;
/* Target hook: emit assembly for integer object X of SIZE bytes.
   Returns true if this function emitted it, false to fall back to
   the default.  Two special 32-bit cases are handled when
   RELOCATABLE_NEEDS_FIXUP is defined; everything else defers to
   default_assemble_integer.  */
8204 #ifdef RELOCATABLE_NEEDS_FIXUP
8205 /* Special handling for SI values. */
8206 if (size == 4 && aligned_p)
8208 extern int in_toc_section PARAMS ((void));
/* Guard against re-entry when output_addr_const below triggers
   another call into this hook.  */
8209 static int recurse = 0;
8211 /* For -mrelocatable, we mark all addresses that need to be fixed up
8212 in the .fixup section. */
8213 if (TARGET_RELOCATABLE
8214 && !in_toc_section ()
8215 && !in_text_section ()
8216 && !recurse
8217 && GET_CODE (x) != CONST_INT
8218 && GET_CODE (x) != CONST_DOUBLE
8219 && CONSTANT_P (x))
8221 char buf[256];
8223 recurse = 1;
/* Emit a local label "LCPn" before the word, write the word itself
   with an @fixup relocation, and record the label's address in the
   .fixup section so the startup code can relocate it.  */
8224 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8225 fixuplabelno++;
8226 ASM_OUTPUT_LABEL (asm_out_file, buf);
8227 fprintf (asm_out_file, "\t.long\t(");
8228 output_addr_const (asm_out_file, x);
8229 fprintf (asm_out_file, ")@fixup\n");
8230 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8231 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8232 fprintf (asm_out_file, "\t.long\t");
8233 assemble_name (asm_out_file, buf);
8234 fprintf (asm_out_file, "\n\t.previous\n");
8235 recurse = 0;
8236 return true;
8238 /* Remove initial .'s to turn a -mcall-aixdesc function
8239 address into the address of the descriptor, not the function
8240 itself. */
8241 else if (GET_CODE (x) == SYMBOL_REF
8242 && XSTR (x, 0)[0] == '.'
8243 && DEFAULT_ABI == ABI_AIX)
8245 const char *name = XSTR (x, 0);
8246 while (*name == '.')
8247 name++;
8249 fprintf (asm_out_file, "\t.long\t%s\n", name);
8250 return true;
8253 #endif /* RELOCATABLE_NEEDS_FIXUP */
/* Anything not handled above is assembled the generic way.  */
8254 return default_assemble_integer (x, size, aligned_p);
8257 #ifdef HAVE_GAS_HIDDEN
8258 /* Emit an assembler directive to set symbol visibility for DECL to
8259 VISIBILITY_TYPE. */
8261 static void
8262 rs6000_assemble_visibility (decl, vis)
8263 tree decl;
8264 int vis;
/* Target hook: emit .internal/.hidden/.protected directives for DECL
   with visibility VIS.  On AIX-ABI targets a function has both an
   entry-point symbol (".name") and a descriptor symbol ("name"), and
   both need the directive; otherwise the default hook suffices.  */
8266 /* Functions need to have their entry point symbol visibility set as
8267 well as their descriptor symbol visibility. */
8268 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
/* Index 0 is NULL: VIS is assumed to be a nonzero visibility enum
   value here — TODO confirm callers never pass the default (0).  */
8270 static const char * const visibility_types[] = {
8271 NULL, "internal", "hidden", "protected"
8274 const char *name, *type;
8276 name = ((* targetm.strip_name_encoding)
8277 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8278 type = visibility_types[vis];
/* Descriptor symbol first, then the dot-prefixed entry point.  */
8280 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8281 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8283 else
8284 default_assemble_visibility (decl, vis);
8286 #endif
8288 enum rtx_code
8289 rs6000_reverse_condition (mode, code)
8290 enum machine_mode mode;
8291 enum rtx_code code;
8293 /* Reversal of FP compares takes care -- an ordered compare
8294 becomes an unordered compare and vice versa. */
8295 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8296 return reverse_condition_maybe_unordered (code);
8297 else
8298 return reverse_condition (code);
8301 /* Generate a compare for CODE. Return a brand-new rtx that
8302 represents the result of the compare. */
8304 static rtx
8305 rs6000_generate_compare (code)
8306 enum rtx_code code;
/* Emit a compare of rs6000_compare_op0 against rs6000_compare_op1
   for comparison CODE and return a fresh (CODE cc-reg 0) rtx that
   later branch/set patterns can consume.  CODE may be remapped along
   the way (e.g. for SPE, or FP compares synthesized via an OR).  */
8308 enum machine_mode comp_mode;
8309 rtx compare_result;
/* Choose the condition-code mode: FP, unsigned, or plain signed.  */
8311 if (rs6000_compare_fp_p)
8312 comp_mode = CCFPmode;
8313 else if (code == GTU || code == LTU
8314 || code == GEU || code == LEU)
8315 comp_mode = CCUNSmode;
8316 else
8317 comp_mode = CCmode;
8319 /* First, the compare. */
8320 compare_result = gen_reg_rtx (comp_mode);
8322 /* SPE FP compare instructions on the GPRs. Yuck! */
8323 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8325 rtx cmp, or1, or2, or_result, compare_result2;
/* SPE only provides eq/gt/lt tests, so map every FP code onto one
   of those three; the tst* variants skip NaN handling and are used
   under -funsafe-math-optimizations.  */
8327 switch (code)
8329 case EQ:
8330 case UNEQ:
8331 case NE:
8332 case LTGT:
8333 cmp = flag_unsafe_math_optimizations
8334 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8335 rs6000_compare_op1)
8336 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8337 rs6000_compare_op1);
8338 break;
8339 case GT:
8340 case GTU:
8341 case UNGT:
8342 case UNGE:
8343 case GE:
8344 case GEU:
8345 cmp = flag_unsafe_math_optimizations
8346 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8347 rs6000_compare_op1)
8348 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8349 rs6000_compare_op1);
8350 break;
8351 case LT:
8352 case LTU:
8353 case UNLT:
8354 case UNLE:
8355 case LE:
8356 case LEU:
8357 cmp = flag_unsafe_math_optimizations
8358 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8359 rs6000_compare_op1)
8360 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8361 rs6000_compare_op1);
8362 break;
8363 default:
8364 abort ();
8367 /* Synthesize LE and GE from LT/GT || EQ. */
8368 if (code == LE || code == GE || code == LEU || code == GEU)
8370 /* Synthesize GE/LE frome GT/LT || EQ. */
8372 emit_insn (cmp);
/* First compare (the strict half) has been emitted; demote the
   code to its strict form, then OR in a separate EQ compare.  */
8374 switch (code)
8376 case LE: code = LT; break;
8377 case GE: code = GT; break;
8378 case LEU: code = LT; break;
8379 case GEU: code = GT; break;
8380 default: abort ();
8383 or1 = gen_reg_rtx (SImode);
8384 or2 = gen_reg_rtx (SImode);
8385 or_result = gen_reg_rtx (CCEQmode);
8386 compare_result2 = gen_reg_rtx (CCFPmode);
8388 /* Do the EQ. */
8389 cmp = flag_unsafe_math_optimizations
8390 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8391 rs6000_compare_op1)
8392 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8393 rs6000_compare_op1);
8394 emit_insn (cmp);
8396 /* The MC8540 FP compare instructions set the CR bits
8397 differently than other PPC compare instructions. For
8398 that matter, there is no generic test instruction, but a
8399 testgt, testlt, and testeq. For a true condition, bit 2
8400 is set (x1xx) in the CR. Following the traditional CR
8401 values:
8403 LT GT EQ OV
8404 bit3 bit2 bit1 bit0
8406 ... bit 2 would be a GT CR alias, so later on we
8407 look in the GT bits for the branch instructions.
8408 However, we must be careful to emit correct RTL in
8409 the meantime, so optimizations don't get confused. */
8411 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8412 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8414 /* OR them together. */
8415 cmp = gen_rtx_SET (VOIDmode, or_result,
8416 gen_rtx_COMPARE (CCEQmode,
8417 gen_rtx_IOR (SImode, or1, or2),
8418 const_true_rtx));
8419 compare_result = or_result;
8420 code = EQ;
8422 else
8424 /* We only care about 1 bit (x1xx), so map everything to NE to
8425 maintain rtl sanity. We'll get to the right bit (x1xx) at
8426 code output time. */
8427 if (code == NE || code == LTGT)
8428 /* Do the inverse here because we have no cmpne
8429 instruction. We use the cmpeq instruction and expect
8430 to get a 0 instead. */
8431 code = EQ;
8432 else
8433 code = NE;
8436 emit_insn (cmp);
8438 else
/* Normal case: a single compare insn in the chosen CC mode.  */
8439 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8440 gen_rtx_COMPARE (comp_mode,
8441 rs6000_compare_op0,
8442 rs6000_compare_op1)));
8444 /* Some kinds of FP comparisons need an OR operation;
8445 except for flag_unsafe_math_optimizations we don't bother. */
8446 if (rs6000_compare_fp_p
8447 && ! flag_unsafe_math_optimizations
8448 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8449 && (code == LE || code == GE
8450 || code == UNEQ || code == LTGT
8451 || code == UNGT || code == UNLT))
/* Split CODE into two primitive conditions, cror them together into
   a CCEQ result, and test that result for EQ below.  */
8453 enum rtx_code or1, or2;
8454 rtx or1_rtx, or2_rtx, compare2_rtx;
8455 rtx or_result = gen_reg_rtx (CCEQmode);
8457 switch (code)
8459 case LE: or1 = LT; or2 = EQ; break;
8460 case GE: or1 = GT; or2 = EQ; break;
8461 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8462 case LTGT: or1 = LT; or2 = GT; break;
8463 case UNGT: or1 = UNORDERED; or2 = GT; break;
8464 case UNLT: or1 = UNORDERED; or2 = LT; break;
8465 default: abort ();
8467 validate_condition_mode (or1, comp_mode);
8468 validate_condition_mode (or2, comp_mode);
8469 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8470 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8471 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8472 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8473 const_true_rtx);
8474 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8476 compare_result = or_result;
8477 code = EQ;
8480 validate_condition_mode (code, GET_MODE (compare_result));
/* Hand back (CODE cc-reg 0) for the caller to branch/set on.  */
8482 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8486 /* Emit the RTL for an sCOND pattern. */
8488 void
8489 rs6000_emit_sCOND (code, result)
8490 enum rtx_code code;
8491 rtx result;
/* Expand an sCOND pattern: set RESULT to 1 if comparison CODE of the
   stashed rs6000_compare_op0/op1 holds, else 0.  */
8493 rtx condition_rtx;
8494 enum machine_mode op_mode;
8496 condition_rtx = rs6000_generate_compare (code);
/* CONST_INT operands have VOIDmode; fall back to the other operand
   to learn the real comparison mode.  */
8498 op_mode = GET_MODE (rs6000_compare_op0);
8499 if (op_mode == VOIDmode)
8500 op_mode = GET_MODE (rs6000_compare_op1);
8502 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
/* 64-bit compares produce a DImode 0/1; narrow/move into RESULT.  */
8504 PUT_MODE (condition_rtx, DImode);
8505 convert_move (result, condition_rtx, 0);
8507 else
8509 PUT_MODE (condition_rtx, SImode);
8510 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8514 /* Emit a branch of kind CODE to location LOC. */
8516 void
8517 rs6000_emit_cbranch (code, loc)
8518 enum rtx_code code;
8519 rtx loc;
8521 rtx condition_rtx, loc_ref;
8523 condition_rtx = rs6000_generate_compare (code);
8524 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8525 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8526 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8527 loc_ref, pc_rtx)));
8530 /* Return the string to output a conditional branch to LABEL, which is
8531 the operand number of the label, or -1 if the branch is really a
8532 conditional return.
8534 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8535 condition code register and its mode specifies what kind of
8536 comparison we made.
8538 REVERSED is nonzero if we should reverse the sense of the comparison.
8540 INSN is the insn. */
8542 char *
8543 output_cbranch (op, label, reversed, insn)
8544 rtx op;
8545 const char * label;
8546 int reversed;
8547 rtx insn;
/* Build and return the assembler text for a conditional branch.
   OP is (CODE cc-reg 0); LABEL is the branch target operand text, or
   NULL for a conditional return; REVERSED inverts the sense; INSN is
   the branch insn (used for its length attribute and REG_BR_PROB
   note).  The result lives in a static buffer, so it is only valid
   until the next call.  */
8549 static char string[64];
8550 enum rtx_code code = GET_CODE (op);
8551 rtx cc_reg = XEXP (op, 0);
8552 enum machine_mode mode = GET_MODE (cc_reg);
8553 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
/* Length 8 means the target is out of conditional-branch range; we
   then emit an inverted short branch around an unconditional one.  */
8554 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8555 int really_reversed = reversed ^ need_longbranch;
8556 char *s = string;
8557 const char *ccode;
8558 const char *pred;
8559 rtx note;
8561 validate_condition_mode (code, mode);
8563 /* Work out which way this really branches. We could use
8564 reverse_condition_maybe_unordered here always but this
8565 makes the resulting assembler clearer. */
8566 if (really_reversed)
8568 /* Reversal of FP compares takes care -- an ordered compare
8569 becomes an unordered compare and vice versa. */
8570 if (mode == CCFPmode)
8571 code = reverse_condition_maybe_unordered (code);
8572 else
8573 code = reverse_condition (code);
8576 if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
8578 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8579 to the GT bit. */
8580 if (code == EQ)
8581 /* Opposite of GT. */
8582 code = UNLE;
8583 else if (code == NE)
8584 code = GT;
8585 else
8586 abort ();
/* Map the rtx comparison onto its condition-mnemonic suffix.  */
8589 switch (code)
8591 /* Not all of these are actually distinct opcodes, but
8592 we distinguish them for clarity of the resulting assembler. */
8593 case NE: case LTGT:
8594 ccode = "ne"; break;
8595 case EQ: case UNEQ:
8596 ccode = "eq"; break;
8597 case GE: case GEU:
8598 ccode = "ge"; break;
8599 case GT: case GTU: case UNGT:
8600 ccode = "gt"; break;
8601 case LE: case LEU:
8602 ccode = "le"; break;
8603 case LT: case LTU: case UNLT:
8604 ccode = "lt"; break;
8605 case UNORDERED: ccode = "un"; break;
8606 case ORDERED: ccode = "nu"; break;
8607 case UNGE: ccode = "nl"; break;
8608 case UNLE: ccode = "ng"; break;
8609 default:
8610 abort ();
8613 /* Maybe we have a guess as to how likely the branch is.
8614 The old mnemonics don't have a way to specify this information. */
8615 pred = "";
8616 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8617 if (note != NULL_RTX)
8619 /* PROB is the difference from 50%. */
8620 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8621 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
8623 /* Only hint for highly probable/improbable branches on newer
8624 cpus as static prediction overrides processor dynamic
8625 prediction. For older cpus we may as well always hint, but
8626 assume not taken for branches that are very close to 50% as a
8627 mispredicted taken branch is more expensive than a
8628 mispredicted not-taken branch. */
8629 if (always_hint
8630 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
/* The XOR accounts for long branches: there the short branch we
   emit is the inverse of the requested one.  */
8632 if (abs (prob) > REG_BR_PROB_BASE / 20
8633 && ((prob > 0) ^ need_longbranch))
8634 pred = "+";
8635 else
8636 pred = "-";
/* "{old|new}" pairs select between POWER and PowerPC mnemonics.
   LABEL == NULL means a conditional return via the link register.  */
8640 if (label == NULL)
8641 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8642 else
8643 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8645 /* We need to escape any '%' characters in the reg_names string.
8646 Assume they'd only be the first character... */
8647 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8648 *s++ = '%';
8649 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8651 if (label != NULL)
8653 /* If the branch distance was too far, we may have to use an
8654 unconditional branch to go the distance. */
8655 if (need_longbranch)
8656 s += sprintf (s, ",$+8\n\tb %s", label);
8657 else
8658 s += sprintf (s, ",%s", label);
8661 return string;
8664 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8665 operands of the last comparison is nonzero/true, FALSE_COND if it
8666 is zero/false. Return 0 if the hardware has no such operation. */
8669 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8670 rtx dest;
8671 rtx op;
8672 rtx true_cond;
8673 rtx false_cond;
8675 enum rtx_code code = GET_CODE (op);
8676 rtx op0 = rs6000_compare_op0;
8677 rtx op1 = rs6000_compare_op1;
8678 REAL_VALUE_TYPE c1;
8679 enum machine_mode compare_mode = GET_MODE (op0);
8680 enum machine_mode result_mode = GET_MODE (dest);
8681 rtx temp;
8683 /* These modes should always match. */
8684 if (GET_MODE (op1) != compare_mode
8685 /* In the isel case however, we can use a compare immediate, so
8686 op1 may be a small constant. */
8687 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8688 return 0;
8689 if (GET_MODE (true_cond) != result_mode)
8690 return 0;
8691 if (GET_MODE (false_cond) != result_mode)
8692 return 0;
8694 /* First, work out if the hardware can do this at all, or
8695 if it's too slow... */
8696 if (! rs6000_compare_fp_p)
8698 if (TARGET_ISEL)
8699 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8700 return 0;
8703 /* Eliminate half of the comparisons by switching operands, this
8704 makes the remaining code simpler. */
8705 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8706 || code == LTGT || code == LT)
8708 code = reverse_condition_maybe_unordered (code);
8709 temp = true_cond;
8710 true_cond = false_cond;
8711 false_cond = temp;
8714 /* UNEQ and LTGT take four instructions for a comparison with zero,
8715 it'll probably be faster to use a branch here too. */
8716 if (code == UNEQ)
8717 return 0;
8719 if (GET_CODE (op1) == CONST_DOUBLE)
8720 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8722 /* We're going to try to implement comparisons by performing
8723 a subtract, then comparing against zero. Unfortunately,
8724 Inf - Inf is NaN which is not zero, and so if we don't
8725 know that the operand is finite and the comparison
8726 would treat EQ different to UNORDERED, we can't do it. */
8727 if (! flag_unsafe_math_optimizations
8728 && code != GT && code != UNGE
8729 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8730 /* Constructs of the form (a OP b ? a : b) are safe. */
8731 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8732 || (! rtx_equal_p (op0, true_cond)
8733 && ! rtx_equal_p (op1, true_cond))))
8734 return 0;
8735 /* At this point we know we can use fsel. */
8737 /* Reduce the comparison to a comparison against zero. */
8738 temp = gen_reg_rtx (compare_mode);
8739 emit_insn (gen_rtx_SET (VOIDmode, temp,
8740 gen_rtx_MINUS (compare_mode, op0, op1)));
8741 op0 = temp;
8742 op1 = CONST0_RTX (compare_mode);
8744 /* If we don't care about NaNs we can reduce some of the comparisons
8745 down to faster ones. */
8746 if (flag_unsafe_math_optimizations)
8747 switch (code)
8749 case GT:
8750 code = LE;
8751 temp = true_cond;
8752 true_cond = false_cond;
8753 false_cond = temp;
8754 break;
8755 case UNGE:
8756 code = GE;
8757 break;
8758 case UNEQ:
8759 code = EQ;
8760 break;
8761 default:
8762 break;
8765 /* Now, reduce everything down to a GE. */
8766 switch (code)
8768 case GE:
8769 break;
8771 case LE:
8772 temp = gen_reg_rtx (compare_mode);
8773 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8774 op0 = temp;
8775 break;
8777 case ORDERED:
8778 temp = gen_reg_rtx (compare_mode);
8779 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8780 op0 = temp;
8781 break;
8783 case EQ:
8784 temp = gen_reg_rtx (compare_mode);
8785 emit_insn (gen_rtx_SET (VOIDmode, temp,
8786 gen_rtx_NEG (compare_mode,
8787 gen_rtx_ABS (compare_mode, op0))));
8788 op0 = temp;
8789 break;
8791 case UNGE:
8792 temp = gen_reg_rtx (result_mode);
8793 emit_insn (gen_rtx_SET (VOIDmode, temp,
8794 gen_rtx_IF_THEN_ELSE (result_mode,
8795 gen_rtx_GE (VOIDmode,
8796 op0, op1),
8797 true_cond, false_cond)));
8798 false_cond = temp;
8799 true_cond = false_cond;
8801 temp = gen_reg_rtx (compare_mode);
8802 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8803 op0 = temp;
8804 break;
8806 case GT:
8807 temp = gen_reg_rtx (result_mode);
8808 emit_insn (gen_rtx_SET (VOIDmode, temp,
8809 gen_rtx_IF_THEN_ELSE (result_mode,
8810 gen_rtx_GE (VOIDmode,
8811 op0, op1),
8812 true_cond, false_cond)));
8813 true_cond = temp;
8814 false_cond = true_cond;
8816 temp = gen_reg_rtx (compare_mode);
8817 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8818 op0 = temp;
8819 break;
8821 default:
8822 abort ();
8825 emit_insn (gen_rtx_SET (VOIDmode, dest,
8826 gen_rtx_IF_THEN_ELSE (result_mode,
8827 gen_rtx_GE (VOIDmode,
8828 op0, op1),
8829 true_cond, false_cond)));
8830 return 1;
8833 /* Same as above, but for ints (isel). */
8835 static int
8836 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8837 rtx dest;
8838 rtx op;
8839 rtx true_cond;
8840 rtx false_cond;
/* Integer conditional move via the isel instruction:
   DEST = OP ? TRUE_COND : FALSE_COND.  Returns 1 on success, 0 if
   isel cannot handle the mode.  */
8842 rtx condition_rtx, cr;
8844 /* All isel implementations thus far are 32-bits. */
8845 if (GET_MODE (rs6000_compare_op0) != SImode)
8846 return 0;
8848 /* We still have to do the compare, because isel doesn't do a
8849 compare, it just looks at the CRx bits set by a previous compare
8850 instruction. */
8851 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8852 cr = XEXP (condition_rtx, 0);
/* CCmode means a signed compare was emitted; anything else (e.g.
   CCUNSmode) takes the unsigned isel pattern.  */
8854 if (GET_MODE (cr) == CCmode)
8855 emit_insn (gen_isel_signed (dest, condition_rtx,
8856 true_cond, false_cond, cr));
8857 else
8858 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8859 true_cond, false_cond, cr));
8861 return 1;
8864 const char *
8865 output_isel (operands)
8866 rtx *operands;
8868 enum rtx_code code;
8870 code = GET_CODE (operands[1]);
8871 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8873 PUT_CODE (operands[1], reverse_condition (code));
8874 return "isel %0,%3,%2,%j1";
8876 else
8877 return "isel %0,%2,%3,%j1";
8880 void
8881 rs6000_emit_minmax (dest, code, op0, op1)
8882 rtx dest;
8883 enum rtx_code code;
8884 rtx op0;
8885 rtx op1;
8887 enum machine_mode mode = GET_MODE (op0);
8888 enum rtx_code c;
8889 rtx target;
8891 if (code == SMAX || code == SMIN)
8892 c = GE;
8893 else
8894 c = GEU;
8896 if (code == SMAX || code == UMAX)
8897 target = emit_conditional_move (dest, c, op0, op1, mode,
8898 op0, op1, mode, 0);
8899 else
8900 target = emit_conditional_move (dest, c, op0, op1, mode,
8901 op1, op0, mode, 0);
8902 if (target == NULL_RTX)
8903 abort ();
8904 if (target != dest)
8905 emit_move_insn (dest, target);
8908 /* This page contains routines that are used to determine what the
8909 function prologue and epilogue code will do and write them out. */
8911 /* Return the first fixed-point register that is required to be
8912 saved. 32 if none. */
8915 first_reg_to_save ()
/* Return the lowest-numbered GPR (r13..r31) that the prologue must
   save, or 32 if none.  A register needs saving when it is live and
   either call-saved, or it is the PIC offset table register under a
   PIC-using ABI (V4 or Darwin).  */
8917 int first_reg;
8919 /* Find lowest numbered live register. */
8920 for (first_reg = 13; first_reg <= 31; first_reg++)
8921 if (regs_ever_live[first_reg]
8922 && (! call_used_regs[first_reg]
8923 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8924 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
8925 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8926 break;
8928 #if TARGET_MACHO
/* On Darwin the PIC base register must be saved whenever the function
   uses the PIC offset table, even if the scan above missed it.  */
8929 if (flag_pic
8930 && current_function_uses_pic_offset_table
8931 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8932 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8933 #endif
8935 return first_reg;
8938 /* Similar, for FP regs. */
8941 first_fp_reg_to_save ()
8943 int first_reg;
8945 /* Find lowest numbered live register. */
8946 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8947 if (regs_ever_live[first_reg])
8948 break;
8950 return first_reg;
8953 /* Similar, for AltiVec regs. */
8955 static int
8956 first_altivec_reg_to_save ()
8958 int i;
8960 /* Stack frame remains as is unless we are in AltiVec ABI. */
8961 if (! TARGET_ALTIVEC_ABI)
8962 return LAST_ALTIVEC_REGNO + 1;
8964 /* Find lowest numbered live register. */
8965 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8966 if (regs_ever_live[i])
8967 break;
8969 return i;
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, add all registers that are call-clobbered.  We do this
     because post-reload register optimizers such as regrename_optimize
     may choose to use them.  They never change the register class
     chosen by reload, so cannot create new uses of altivec registers
     if there were none before, so the early exit above is safe.  */
  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
     altivec registers not saved in the mask, which might well make the
     adjustments below more effective in eliding the save/restore of
     VRSAVE in small functions.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (call_used_regs[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): this appears to clear args_info.vregno itself as
     well as the registers below it -- confirm whether vregno is the
     last used argument register or one past it.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
9021 static void
9022 is_altivec_return_reg (reg, xyes)
9023 rtx reg;
9024 void *xyes;
9026 bool *yes = (bool *) xyes;
9027 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9028 *yes = true;
9032 /* Calculate the stack information for the current function. This is
9033 complicated by having two separate calling sequences, the AIX calling
9034 sequence and the V.4 calling sequence.
9036 AIX (and Darwin/Mac OS X) stack frames look like:
9037 32-bit 64-bit
9038 SP----> +---------------------------------------+
9039 | back chain to caller | 0 0
9040 +---------------------------------------+
9041 | saved CR | 4 8 (8-11)
9042 +---------------------------------------+
9043 | saved LR | 8 16
9044 +---------------------------------------+
9045 | reserved for compilers | 12 24
9046 +---------------------------------------+
9047 | reserved for binders | 16 32
9048 +---------------------------------------+
9049 | saved TOC pointer | 20 40
9050 +---------------------------------------+
9051 | Parameter save area (P) | 24 48
9052 +---------------------------------------+
9053 | Alloca space (A) | 24+P etc.
9054 +---------------------------------------+
9055 | Local variable space (L) | 24+P+A
9056 +---------------------------------------+
9057 | Float/int conversion temporary (X) | 24+P+A+L
9058 +---------------------------------------+
9059 | Save area for AltiVec registers (W) | 24+P+A+L+X
9060 +---------------------------------------+
9061 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9062 +---------------------------------------+
9063 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9064 +---------------------------------------+
	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
	+---------------------------------------+
	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9068 +---------------------------------------+
9069 old SP->| back chain to caller's caller |
9070 +---------------------------------------+
9072 The required alignment for AIX configurations is two words (i.e., 8
9073 or 16 bytes).
9076 V.4 stack frames look like:
9078 SP----> +---------------------------------------+
9079 | back chain to caller | 0
9080 +---------------------------------------+
9081 | caller's saved LR | 4
9082 +---------------------------------------+
9083 | Parameter save area (P) | 8
9084 +---------------------------------------+
9085 | Alloca space (A) | 8+P
9086 +---------------------------------------+
9087 | Varargs save area (V) | 8+P+A
9088 +---------------------------------------+
9089 | Local variable space (L) | 8+P+A+V
9090 +---------------------------------------+
9091 | Float/int conversion temporary (X) | 8+P+A+V+L
9092 +---------------------------------------+
9093 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9094 +---------------------------------------+
9095 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9096 +---------------------------------------+
9097 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9098 +---------------------------------------+
9099 | SPE: area for 64-bit GP registers |
9100 +---------------------------------------+
9101 | SPE alignment padding |
9102 +---------------------------------------+
9103 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9104 +---------------------------------------+
9105 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9106 +---------------------------------------+
9107 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9108 +---------------------------------------+
9109 old SP->| back chain to caller's caller |
9110 +---------------------------------------+
9112 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9113 given. (But note below and in sysv4.h that we require only 8 and
9114 may round up the size of our stack frame anyways. The historical
9115 reason is early versions of powerpc-linux which didn't properly
9116 align the stack at program startup. A happy side-effect is that
9117 -mno-eabi libraries can be used with -meabi programs.)
9119 The EABI configuration defaults to the V.4 layout, unless
9120 -mcall-aix is used, in which case the AIX layout is used. However,
9121 the stack alignment requirements may differ. If -mno-eabi is not
9122 given, the required stack alignment is 8 bytes; if -mno-eabi is
9123 given, the required alignment is 16 bytes. (But see V.4 comment
9124 above.) */
9126 #ifndef ABI_STACK_BOUNDARY
9127 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9128 #endif
/* Compute the stack frame layout for the current function and return
   a pointer to a statically allocated rs6000_stack_t describing it.
   The fields are recomputed from scratch on each call (apart from the
   cached SPE insn-chain scan); the static object is just returnable
   storage.  See the large block comment above for the AIX/Darwin and
   V.4 frame pictures this function encodes.  */

rs6000_stack_t *
rs6000_stack_info ()
{
  static rs6000_stack_t info, zero_info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;	/* bytes per GP register */
  enum rs6000_abi abi;
  int ehrd_size;		/* size of the EH-return data area */
  int total_raw_size;
  static int insn_chain_scanned = -1;

  /* Zero all fields portably.  */
  info = zero_info;

  if (TARGET_SPE)
    {
      /* Cache value so we don't rescan instruction chain over and over.  */
      if (insn_chain_scanned < 0)
	insn_chain_scanned = (int) spe_func_has_64bit_regs_p ();
      info_ptr->spe_64bit_regs_used = insn_chain_scanned;
    }

  /* Select which calling sequence.  */
  info_ptr->abi = abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && abi == ABI_V4)
       || (flag_pic && abi == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always if but one register
     gets used in 64-bits.  Otherwise, all the registers in the frame
     get saved in 32-bits.

     So... since when we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if (rs6000_ra_ever_killed ()
      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      /* Out-of-line FP save routines clobber LR.  */
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (abi == ABI_V4 && current_function_calls_alloca)
      || (DEFAULT_ABI == ABI_DARWIN
	  && flag_pic
	  && current_function_uses_pic_offset_table)
      || info_ptr->calls_p)
    {
      info_ptr->lr_save_p = 1;
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      /* Only V.4 reserves a distinct slot; AIX-style frames use a
	 fixed location (see cr_save_offset below).  */
      if (abi == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      /* Count the EH data registers.  */
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI
		       && info_ptr->spe_64bit_regs_used != 0
		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size     = reg_size;
  info_ptr->fixed_size   = RS6000_SAVE_AREA;
  info_ptr->varargs_size = RS6000_VARARGS_AREA;
  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
					 8);

  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
    {
      info_ptr->vrsave_mask = compute_vrsave_mask ();
      info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
    }
  else
    {
      info_ptr->vrsave_mask = 0;
      info_ptr->vrsave_size = 0;
    }

  /* Calculate the offsets.  All offsets are negative displacements
     from the incoming stack pointer (the top of the frame), except
     the AIX CR/LR slots which live in the caller's frame.  */
  switch (abi)
    {
    case ABI_NONE:
    default:
      abort ();

    case ABI_AIX:
    case ABI_AIX_NODESC:
    case ABI_DARWIN:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
      info_ptr->cr_save_offset = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
	{
	  /* Align stack so SPE GPR save area is aligned on a
	     double-word boundary.  */
	  if (info_ptr->spe_gp_size != 0)
	    info_ptr->spe_padding_size
	      = 8 - (-info_ptr->cr_save_offset % 8);
	  else
	    info_ptr->spe_padding_size = 0;

	  info_ptr->spe_gp_save_offset
	    = info_ptr->cr_save_offset
	    - info_ptr->spe_padding_size
	    - info_ptr->spe_gp_size;

	  /* Adjust for SPE case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
	}
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
	}
      else
	info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
      info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
      info_ptr->lr_save_offset = reg_size;
      break;
    }

  /* NOTE(review): vrsave_size appears twice in this sum, which looks
     like double counting -- confirm against the prologue/epilogue
     emitters before changing.  Also "|| ABI_DARWIN" tests a nonzero
     enum constant and so is always true; presumably it was meant to
     be "|| DEFAULT_ABI == ABI_DARWIN".  Both only over-align/oversize
     the frame, so they are safe but wasteful.  */
  info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
				      + info_ptr->gp_size
				      + info_ptr->altivec_size
				      + info_ptr->altivec_padding_size
				      + info_ptr->vrsave_size
				      + info_ptr->spe_gp_size
				      + info_ptr->spe_padding_size
				      + ehrd_size
				      + info_ptr->cr_size
				      + info_ptr->lr_size
				      + info_ptr->vrsave_size
				      + info_ptr->toc_size,
				      (TARGET_ALTIVEC_ABI || ABI_DARWIN)
				      ? 16 : 8);

  total_raw_size = (info_ptr->vars_size
		    + info_ptr->parm_size
		    + info_ptr->save_size
		    + info_ptr->varargs_size
		    + info_ptr->fixed_size);

  info_ptr->total_size =
    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (abi == ABI_V4)
    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;

  else
    info_ptr->push_p = (frame_pointer_needed
			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
			|| ((total_raw_size - info_ptr->fixed_size)
			    > (TARGET_32BIT ? 220 : 288)));

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI
      || info_ptr->spe_64bit_regs_used == 0
      || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  if (! info_ptr->toc_save_p)
    info_ptr->toc_save_offset = 0;

  return info_ptr;
}
/* Return true if the current function uses any GPRs in 64-bit SIMD
   mode.  */

static bool
spe_func_has_64bit_regs_p ()
{
  rtx insns, insn;

  /* Functions that save and restore all the call-saved registers will
     need to save/restore the registers in 64-bits.  */
  if (current_function_calls_eh_return
      || current_function_calls_setjmp
      || current_function_has_nonlocal_goto)
    return true;

  insns = get_insns ();

  /* Scan every real insn for a SET whose source has an SPE vector
     mode.  Note this only inspects the source side of plain SETs;
     PARALLELs are not walked.  */
  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx i;

	  i = PATTERN (insn);
	  if (GET_CODE (i) == SET
	      && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
	    return true;
	}
    }

  return false;
}
/* Dump the stack layout record INFO to stderr for debugging.  If INFO
   is null, compute the current function's layout via
   rs6000_stack_info () first.  Fields holding their default values
   are suppressed so the output stays compact.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 / 64 / LAST_ALTIVEC_REGNO + 1 mean "nothing saved" for the
     respective register classes, so those cases are skipped.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
/* Implement RETURN_ADDR_RTX: return an rtx for the return address
   COUNT frames up from the current one; FRAME is the frame address to
   walk from (as for __builtin_return_address).  */

rtx
rs6000_return_addr (count, frame)
     int count;
     rtx frame;
{
  /* Currently we don't optimize very well between prolog and body
     code and for PIC code the code can be actually quite bad, so
     don't try to be too clever here.  */
  if (count != 0 || flag_pic != 0)
    {
      /* Force a full frame so the saved LR actually lands on the
	 stack where the load below expects it.  */
      cfun->machine->ra_needs_full_frame = 1;

      /* Chase the back chain once, then load the LR save word at
	 RETURN_ADDRESS_OFFSET from that frame.  */
      return
	gen_rtx_MEM
	  (Pmode,
	   memory_address
	     (Pmode,
	      plus_constant (copy_to_reg
			       (gen_rtx_MEM (Pmode,
					     memory_address (Pmode, frame))),
			     RETURN_ADDRESS_OFFSET)));
    }

  /* count == 0 and non-PIC: just use the incoming value of LR.  */
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
}
/* Say whether a function is a candidate for sibcall handling or not.
   We do not allow indirect calls to be optimized into sibling calls.
   Also, we can't do it if there are any vector parameters; there's
   nowhere to put the VRsave code so it works; note that functions with
   vector parameters are required to have a prototype, so the argument
   type info must be available here.  (The tail recursion case can work
   with vector parameters, but there's no way to distinguish here.) */
static bool
rs6000_function_ok_for_sibcall (decl, exp)
     tree decl;
     tree exp ATTRIBUTE_UNUSED;
{
  tree type;
  /* DECL is null for an indirect call -- those always fall through to
     the "return false" at the bottom.  */
  if (decl)
    {
      if (TARGET_ALTIVEC_VRSAVE)
	{
	  /* Reject any callee taking a vector argument.  */
	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
	       type; type = TREE_CHAIN (type))
	    {
	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
		return false;
	    }
	}
      /* Only local (or Darwin) callees qualify, and only when the
	 call is not forced through a long-call stub.  */
      if (DEFAULT_ABI == ABI_DARWIN
	  || (*targetm.binds_local_p) (decl))
	{
	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));

	  if (!lookup_attribute ("longcall", attr_list)
	      || lookup_attribute ("shortcall", attr_list))
	    return true;
	}
    }
  return false;
}
/* Return nonzero if the link register may have been clobbered
   somewhere in the current function (so the prologue must save it),
   zero otherwise.  */

static int
rs6000_ra_ever_killed ()
{
  rtx top;
  rtx reg;
  rtx insn;

  /* Irritatingly, there are two kinds of thunks -- those created with
     TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
     through the regular part of the compiler.  This is a very hacky
     way to tell them apart.  */
  if (current_function_is_thunk && !no_new_pseudos)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

	move LR->R0
	bcl to set PIC register
	move LR->R31
	move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  /* Scan every insn for an auto-inc use of LR, a non-sibling call, or
     an explicit set of LR outside the prologue/epilogue.  */
  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (FIND_REG_INC_NOTE (insn, reg))
	    return 1;
	  else if (GET_CODE (insn) == CALL_INSN
		   && !SIBLING_CALL_P (insn))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
	}
    }
  return 0;
}
9726 /* Add a REG_MAYBE_DEAD note to the insn. */
9727 static void
9728 rs6000_maybe_dead (insn)
9729 rtx insn;
9731 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9732 const0_rtx,
9733 REG_NOTES (insn));
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  FROMPROLOG is nonzero when
   called while emitting the prologue itself: we then use the LR and
   r0 hard registers directly instead of fresh pseudos, and the
   internal labels are keyed off rs6000_pic_labelno.  Every emitted
   insn gets a REG_MAYBE_DEAD note via rs6000_maybe_dead.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* Case 1: SVR4 small-model PIC (-fpic).  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
      rs6000_maybe_dead (emit_move_insn (dest, temp));
    }
  /* Case 2: ELF large-model PIC (-fPIC): compute the TOC base from a
     pair of local labels (prologue) or a GOT-style load (elsewhere).  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF<n>/LCL<n> bracket the PIC-base computation.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  /* Each non-prologue reload of the TOC gets its own label.  */
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								symF,
								tocsym)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_move_insn (temp0,
					     gen_rtx_MEM (Pmode, dest)));
	}
      rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      /* Materialize the absolute address of the TOC anchor label.  */
      rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
      rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
  else
    abort ();
}
/* Return the alias set used for TOC references, allocating it lazily
   on first use so every caller shares the same set.  */

int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
9840 /* This retuns nonzero if the current function uses the TOC. This is
9841 determined by the presence of (unspec ... 7), which is generated by
9842 the various load_toc_* patterns. */
9845 uses_TOC ()
9847 rtx insn;
9849 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9850 if (INSN_P (insn))
9852 rtx pat = PATTERN (insn);
9853 int i;
9855 if (GET_CODE (pat) == PARALLEL)
9856 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9857 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9858 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9859 return 1;
9861 return 0;
9865 create_TOC_reference (symbol)
9866 rtx symbol;
9868 return gen_rtx_PLUS (Pmode,
9869 gen_rtx_REG (Pmode, TOC_REGISTER),
9870 gen_rtx_CONST (Pmode,
9871 gen_rtx_MINUS (Pmode, symbol,
9872 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9875 #if TARGET_AIX
9876 /* __throw will restore its own return address to be the same as the
9877 return address of the function that the throw is being made to.
9878 This is unfortunate, because we want to check the original
9879 return address to see if we need to restore the TOC.
9880 So we have to squirrel it away here.
9881 This is used only in compiling __throw and __rethrow.
9883 Most of this code should be removed by CSE. */
9884 static rtx insn_after_throw;
/* This does the saving...  Capture, into the static INSN_AFTER_THROW,
   the 4-byte instruction found at our caller's return address, so
   rs6000_emit_eh_toc_restore can later compare it against the
   TOC-restore opcode.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Follow the back chain at the frame pointer to the caller's SP.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Two pointer-words above that is the saved-LR slot (per the AIX
     frame layout described above); load the return address ...  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  /* ... and squirrel away the instruction found there.  */
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
	bl  somefunction-trampoline
	lwz r2,20(sp)

   and later,
	somefunction-trampoline:
	stw r2,20(sp)
	 ... load function address in the count register ...
	bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
	bl  somefunction
	nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
	bl  somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at the caller's frame (through the back chain) and
     stop once we pass STACKSIZE bytes above it.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* Opcode of the TOC-restore insn: "lwz r2,20(r1)" for 32-bit,
     "ld r2,40(r1)" for 64-bit.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run earlier in this
     function to seed insn_after_throw.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn after this frame's return address is not the
     TOC-restore opcode, this call did not save the TOC.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* The TOC slot is 5 pointers up from the back chain; reload r2.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance one frame up the back chain and fetch the opcode at that
     frame's return address for the next iteration.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
10003 #endif /* TARGET_AIX */
10005 /* This ties together stack memory (MEM with an alias set of
10006 rs6000_sr_alias_set) and the change to the stack pointer. */
10008 static void
10009 rs6000_emit_stack_tie ()
10011 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10013 set_mem_alias_set (mem, rs6000_sr_alias_set);
10014 emit_insn (gen_stack_tie (mem));
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   SIZE is the number of bytes to subtract from the stack pointer; the
   emitted insn is marked RTX_FRAME_RELATED_P with a note describing
   the r1 adjustment for the unwinder.  Honors -fstack-limit via a
   conditional trap when current_function_limit_stack is set.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  if (current_function_limit_stack)
    {
      /* Stack limit in a fixed register (r2..r31): trap if
	 r1 would drop below limit + SIZE.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* Stack limit as a symbol: materialize limit + SIZE with a
	 lis/addi pair (32-bit SVR4 only) and trap the same way.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without store-with-update, r12 is always used below to rewrite
     the back chain, so it must hold the old sp in that case too.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Decrement does not fit the update insn's 16-bit
	     displacement; load it into r0 first.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* stwu/stdu: atomically decrement r1 and store the old r1 as
	 the new back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      /* Store the saved old sp (r12, set above) as the back chain.  */
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the sp adjustment for dwarf2 frame debugging.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10105 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
10106 knows that:
10108 (mem (plus (blah) (regXX)))
10110 is really:
10112 (mem (plus (blah) (const VALUE_OF_REGXX))). */
10114 static void
10115 altivec_frame_fixup (insn, reg, val)
10116 rtx insn, reg;
10117 HOST_WIDE_INT val;
10119 rtx real;
10121 real = copy_rtx (PATTERN (insn));
10123 real = replace_rtx (real, reg, GEN_INT (val));
10125 RTX_FRAME_RELATED_P (insn) = 1;
10126 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10127 real,
10128 REG_NOTES (insn));
10131 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10132 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10133 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10134 deduce these equivalences by itself so it wasn't necessary to hold
10135 its hand so much. */
10137 static void
10138 rs6000_frame_related (insn, reg, val, reg2, rreg)
10139 rtx insn;
10140 rtx reg;
10141 HOST_WIDE_INT val;
10142 rtx reg2;
10143 rtx rreg;
10145 rtx real, temp;
10147 /* copy_rtx will not make unique copies of registers, so we need to
10148 ensure we don't have unwanted sharing here. */
10149 if (reg == reg2)
10150 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10152 if (reg == rreg)
10153 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10155 real = copy_rtx (PATTERN (insn));
10157 if (reg2 != NULL_RTX)
10158 real = replace_rtx (real, reg2, rreg);
10160 real = replace_rtx (real, reg,
10161 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10162 STACK_POINTER_REGNUM),
10163 GEN_INT (val)));
10165 /* We expect that 'real' is either a SET or a PARALLEL containing
10166 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10167 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10169 if (GET_CODE (real) == SET)
10171 rtx set = real;
10173 temp = simplify_rtx (SET_SRC (set));
10174 if (temp)
10175 SET_SRC (set) = temp;
10176 temp = simplify_rtx (SET_DEST (set));
10177 if (temp)
10178 SET_DEST (set) = temp;
10179 if (GET_CODE (SET_DEST (set)) == MEM)
10181 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10182 if (temp)
10183 XEXP (SET_DEST (set), 0) = temp;
10186 else if (GET_CODE (real) == PARALLEL)
10188 int i;
10189 for (i = 0; i < XVECLEN (real, 0); i++)
10190 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10192 rtx set = XVECEXP (real, 0, i);
10194 temp = simplify_rtx (SET_SRC (set));
10195 if (temp)
10196 SET_SRC (set) = temp;
10197 temp = simplify_rtx (SET_DEST (set));
10198 if (temp)
10199 SET_DEST (set) = temp;
10200 if (GET_CODE (SET_DEST (set)) == MEM)
10202 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10203 if (temp)
10204 XEXP (SET_DEST (set), 0) = temp;
10206 RTX_FRAME_RELATED_P (set) = 1;
10209 else
10210 abort ();
10212 if (TARGET_SPE)
10213 real = spe_synthesize_frame_save (real);
10215 RTX_FRAME_RELATED_P (insn) = 1;
10216 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10217 real,
10218 REG_NOTES (insn));
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  Notes that are not
   a 64-bit (V2SImode) register store are returned unchanged.  */

static rtx
spe_synthesize_frame_save (real)
     rtx real;
{
  rtx synth, offset, reg, real2;

  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+113).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+113))  */

  /* REAL2: the low word, rewritten as an SImode save of the plain
     register number.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  /* SYNTH is cloned before the offset bump so, on big-endian, only
     REAL2 gets the +4 displacement.  */
  synth = copy_rtx (real2);

  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);
  /* FIXME: the ABI says REGNO+1200, but this creates a huge hole
     in the unwinder tables.  I'm still unsure what to do.  */
  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 113));

  /* On little-endian the synthetic (high-word) save takes the +4
     displacement instead.  */
  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
10286 /* Returns an insn that has a vrsave set operation with the
10287 appropriate CLOBBERs. */
10289 static rtx
10290 generate_set_vrsave (reg, info, epiloguep)
10291 rtx reg;
10292 rs6000_stack_t *info;
10293 int epiloguep;
10295 int nclobs, i;
10296 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10297 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10299 clobs[0]
10300 = gen_rtx_SET (VOIDmode,
10301 vrsave,
10302 gen_rtx_UNSPEC_VOLATILE (SImode,
10303 gen_rtvec (2, reg, vrsave),
10304 30));
10306 nclobs = 1;
10308 /* We need to clobber the registers in the mask so the scheduler
10309 does not move sets to VRSAVE before sets of AltiVec registers.
10311 However, if the function receives nonlocal gotos, reload will set
10312 all call saved registers live. We will end up with:
10314 (set (reg 999) (mem))
10315 (parallel [ (set (reg vrsave) (unspec blah))
10316 (clobber (reg 999))])
10318 The clobber will cause the store into reg 999 to be dead, and
10319 flow will attempt to delete an epilogue insn. In this case, we
10320 need an unspec use/set of the register. */
10322 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10323 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10325 if (!epiloguep || call_used_regs [i])
10326 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10327 gen_rtx_REG (V4SImode, i));
10328 else
10330 rtx reg = gen_rtx_REG (V4SImode, i);
10332 clobs[nclobs++]
10333 = gen_rtx_SET (VOIDmode,
10334 reg,
10335 gen_rtx_UNSPEC (V4SImode,
10336 gen_rtvec (1, reg), 27));
10340 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10342 for (i = 0; i < nclobs; ++i)
10343 XVECEXP (insn, 0, i) = clobs[i];
10345 return insn;
10348 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10349 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10351 static void
10352 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10353 rtx frame_reg;
10354 rtx frame_ptr;
10355 enum machine_mode mode;
10356 unsigned int regno;
10357 int offset;
10358 int total_size;
10360 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10361 rtx replacea, replaceb;
10363 int_rtx = GEN_INT (offset);
10365 /* Some cases that need register indexed addressing. */
10366 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10367 || (TARGET_SPE_ABI
10368 && SPE_VECTOR_MODE (mode)
10369 && !SPE_CONST_OFFSET_OK (offset)))
10371 /* Whomever calls us must make sure r11 is available in the
10372 flow path of instructions in the prologue. */
10373 offset_rtx = gen_rtx_REG (Pmode, 11);
10374 emit_move_insn (offset_rtx, int_rtx);
10376 replacea = offset_rtx;
10377 replaceb = int_rtx;
10379 else
10381 offset_rtx = int_rtx;
10382 replacea = NULL_RTX;
10383 replaceb = NULL_RTX;
10386 reg = gen_rtx_REG (mode, regno);
10387 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10388 mem = gen_rtx_MEM (mode, addr);
10389 set_mem_alias_set (mem, rs6000_sr_alias_set);
10391 insn = emit_move_insn (mem, reg);
10393 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10396 /* Emit an offset memory reference suitable for a frame store, while
10397 converting to a valid addressing mode. */
10399 static rtx
10400 gen_frame_mem_offset (mode, reg, offset)
10401 enum machine_mode mode;
10402 rtx reg;
10403 int offset;
10405 rtx int_rtx, offset_rtx;
10407 int_rtx = GEN_INT (offset);
10409 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10411 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10412 emit_move_insn (offset_rtx, int_rtx);
10414 else
10415 offset_rtx = int_rtx;
10417 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
/* Emit function prologue as insns: allocate the stack frame, save
   AltiVec/VRSAVE/FPR/GPR/LR/CR and EH data registers as required by
   the frame layout in rs6000_stack_info, set the frame pointer, and
   set up the TOC/PIC register.  Every register save is annotated for
   dwarf2 unwind info via rs6000_frame_related.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* SPE saves 64-bit GPRs as V2SImode pairs.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && (!TARGET_SPE_ABI
			      || info->spe_64bit_regs_used == 0)
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* Small frames: save registers relative to the new sp using
	 SP_OFFSET.  Large frames: keep the old sp in r12 and save
	 relative to that instead.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 holds the slot offset; see altivec_frame_fixup for
	       how the unwind note recovers the constant.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: a PARALLEL of the FPR stores plus a call to
	 the _savefpr routine (which clobbers LR).  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	      {
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		/* When the offset went through a register, have the
		   note rewrite it back into the constant.  */
		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R11 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, 11),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

      rs6000_emit_load_toc_table (TRUE);

      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			gen_rtx_REG (Pmode, 11));
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      const char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));

      /* These insns may be deleted if the picbase turns out unused;
	 rs6000_maybe_dead marks them accordingly.  */
      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
/* Write function prologue to FILE.  Emits .extern directives for the
   out-of-line FPR save/restore routines and the AIX common-mode
   libcalls, and, when the target has no RTL prologue pattern
   (! HAVE_prologue), generates and prints the prologue insns directly.
   SIZE is unused.  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* common_mode_defined is file-static; emit the externs once per
	 output file.  */
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash. */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    /* Every PowerPC insn is 4 bytes.  */
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
10850 /* Emit function epilogue as insns.
10852 At present, dwarf2out_frame_debug_expr doesn't understand
10853 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10854 anywhere in the epilogue. Most of the insns below would in any case
10855 need special notes to explain where r11 is in relation to the stack. */
10857 void
10858 rs6000_emit_epilogue (sibcall)
10859 int sibcall;
10861 rs6000_stack_t *info;
10862 int restoring_FPRs_inline;
10863 int using_load_multiple;
10864 int using_mfcr_multiple;
10865 int use_backchain_to_restore_sp;
10866 int sp_offset = 0;
10867 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
10868 rtx frame_reg_rtx = sp_reg_rtx;
10869 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10870 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10871 int i;
10873 info = rs6000_stack_info ();
10875 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10877 reg_mode = V2SImode;
10878 reg_size = 8;
10881 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10882 && (!TARGET_SPE_ABI
10883 || info->spe_64bit_regs_used == 0)
10884 && info->first_gp_reg_save < 31);
10885 restoring_FPRs_inline = (sibcall
10886 || current_function_calls_eh_return
10887 || info->first_fp_reg_save == 64
10888 || FP_SAVE_INLINE (info->first_fp_reg_save));
10889 use_backchain_to_restore_sp = (frame_pointer_needed
10890 || current_function_calls_alloca
10891 || info->total_size > 32767);
10892 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
10893 || rs6000_cpu == PROCESSOR_PPC603
10894 || rs6000_cpu == PROCESSOR_PPC750
10895 || optimize_size);
10897 /* If we have a frame pointer, a call to alloca, or a large stack
10898 frame, restore the old stack pointer using the backchain. Otherwise,
10899 we know what size to update it with. */
10900 if (use_backchain_to_restore_sp)
10902 /* Under V.4, don't reset the stack pointer until after we're done
10903 loading the saved registers. */
10904 if (DEFAULT_ABI == ABI_V4)
10905 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
10907 emit_move_insn (frame_reg_rtx,
10908 gen_rtx_MEM (Pmode, sp_reg_rtx));
10911 else if (info->push_p)
10913 if (DEFAULT_ABI == ABI_V4)
10914 sp_offset = info->total_size;
10915 else
10917 emit_insn (TARGET_32BIT
10918 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10919 GEN_INT (info->total_size))
10920 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10921 GEN_INT (info->total_size)));
10925 /* Restore AltiVec registers if needed. */
10926 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10928 int i;
10930 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10931 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10933 rtx addr, areg, mem;
10935 areg = gen_rtx_REG (Pmode, 0);
10936 emit_move_insn
10937 (areg, GEN_INT (info->altivec_save_offset
10938 + sp_offset
10939 + 16 * (i - info->first_altivec_reg_save)));
10941 /* AltiVec addressing mode is [reg+reg]. */
10942 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
10943 mem = gen_rtx_MEM (V4SImode, addr);
10944 set_mem_alias_set (mem, rs6000_sr_alias_set);
10946 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
10950 /* Restore VRSAVE if needed. */
10951 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
10953 rtx addr, mem, reg;
10955 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10956 GEN_INT (info->vrsave_save_offset + sp_offset));
10957 mem = gen_rtx_MEM (SImode, addr);
10958 set_mem_alias_set (mem, rs6000_sr_alias_set);
10959 reg = gen_rtx_REG (SImode, 12);
10960 emit_move_insn (reg, mem);
10962 emit_insn (generate_set_vrsave (reg, info, 1));
10965 /* Get the old lr if we saved it. */
10966 if (info->lr_save_p)
10968 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
10969 info->lr_save_offset + sp_offset);
10971 set_mem_alias_set (mem, rs6000_sr_alias_set);
10973 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
10976 /* Get the old cr if we saved it. */
10977 if (info->cr_save_p)
10979 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10980 GEN_INT (info->cr_save_offset + sp_offset));
10981 rtx mem = gen_rtx_MEM (SImode, addr);
10983 set_mem_alias_set (mem, rs6000_sr_alias_set);
10985 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
10988 /* Set LR here to try to overlap restores below. */
10989 if (info->lr_save_p)
10990 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10991 gen_rtx_REG (Pmode, 0));
10993 /* Load exception handler data registers, if needed. */
10994 if (current_function_calls_eh_return)
10996 unsigned int i, regno;
10998 for (i = 0; ; ++i)
11000 rtx mem;
11002 regno = EH_RETURN_DATA_REGNO (i);
11003 if (regno == INVALID_REGNUM)
11004 break;
11006 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11007 info->ehrd_offset + sp_offset
11008 + reg_size * (int) i);
11009 set_mem_alias_set (mem, rs6000_sr_alias_set);
11011 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11015 /* Restore GPRs. This is done as a PARALLEL if we are using
11016 the load-multiple instructions. */
11017 if (using_load_multiple)
11019 rtvec p;
11020 p = rtvec_alloc (32 - info->first_gp_reg_save);
11021 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11023 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11024 GEN_INT (info->gp_save_offset
11025 + sp_offset
11026 + reg_size * i));
11027 rtx mem = gen_rtx_MEM (reg_mode, addr);
11029 set_mem_alias_set (mem, rs6000_sr_alias_set);
11031 RTVEC_ELT (p, i) =
11032 gen_rtx_SET (VOIDmode,
11033 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11034 mem);
11036 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11038 else
11039 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11040 if ((regs_ever_live[info->first_gp_reg_save+i]
11041 && ! call_used_regs[info->first_gp_reg_save+i])
11042 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11043 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11044 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11046 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11047 GEN_INT (info->gp_save_offset
11048 + sp_offset
11049 + reg_size * i));
11050 rtx mem = gen_rtx_MEM (reg_mode, addr);
11052 /* Restore 64-bit quantities for SPE. */
11053 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11055 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11056 rtx b;
11058 if (!SPE_CONST_OFFSET_OK (offset))
11060 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11061 emit_move_insn (b, GEN_INT (offset));
11063 else
11064 b = GEN_INT (offset);
11066 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11067 mem = gen_rtx_MEM (V2SImode, addr);
11070 set_mem_alias_set (mem, rs6000_sr_alias_set);
11072 emit_move_insn (gen_rtx_REG (reg_mode,
11073 info->first_gp_reg_save + i), mem);
11076 /* Restore fpr's if we need to do it without calling a function. */
11077 if (restoring_FPRs_inline)
11078 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11079 if ((regs_ever_live[info->first_fp_reg_save+i]
11080 && ! call_used_regs[info->first_fp_reg_save+i]))
11082 rtx addr, mem;
11083 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11084 GEN_INT (info->fp_save_offset
11085 + sp_offset
11086 + 8 * i));
11087 mem = gen_rtx_MEM (DFmode, addr);
11088 set_mem_alias_set (mem, rs6000_sr_alias_set);
11090 emit_move_insn (gen_rtx_REG (DFmode,
11091 info->first_fp_reg_save + i),
11092 mem);
11095 /* If we saved cr, restore it here. Just those that were used. */
11096 if (info->cr_save_p)
11098 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11099 int count = 0;
11101 if (using_mfcr_multiple)
11103 for (i = 0; i < 8; i++)
11104 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11105 count++;
11106 if (count == 0)
11107 abort ();
11110 if (using_mfcr_multiple && count > 1)
11112 rtvec p;
11113 int ndx;
11115 p = rtvec_alloc (count);
11117 ndx = 0;
11118 for (i = 0; i < 8; i++)
11119 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11121 rtvec r = rtvec_alloc (2);
11122 RTVEC_ELT (r, 0) = r12_rtx;
11123 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11124 RTVEC_ELT (p, ndx) =
11125 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11126 gen_rtx_UNSPEC (CCmode, r, 20));
11127 ndx++;
11129 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11130 if (ndx != count)
11131 abort ();
11133 else
11134 for (i = 0; i < 8; i++)
11135 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11137 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11138 CR0_REGNO+i),
11139 r12_rtx));
11143 /* If this is V.4, unwind the stack pointer after all of the loads
11144 have been done. We need to emit a block here so that sched
11145 doesn't decide to move the sp change before the register restores
11146 (which may not have any obvious dependency on the stack). This
11147 doesn't hurt performance, because there is no scheduling that can
11148 be done after this point. */
11149 if (DEFAULT_ABI == ABI_V4)
11151 if (frame_reg_rtx != sp_reg_rtx)
11152 rs6000_emit_stack_tie ();
11154 if (use_backchain_to_restore_sp)
11156 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11158 else if (sp_offset != 0)
11160 emit_insn (TARGET_32BIT
11161 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11162 GEN_INT (sp_offset))
11163 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11164 GEN_INT (sp_offset)));
11168 if (current_function_calls_eh_return)
11170 rtx sa = EH_RETURN_STACKADJ_RTX;
11171 emit_insn (TARGET_32BIT
11172 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11173 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11176 if (!sibcall)
11178 rtvec p;
11179 if (! restoring_FPRs_inline)
11180 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11181 else
11182 p = rtvec_alloc (2);
11184 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11185 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11186 gen_rtx_REG (Pmode,
11187 LINK_REGISTER_REGNUM));
11189 /* If we have to restore more than two FP registers, branch to the
11190 restore function. It will return to our caller. */
11191 if (! restoring_FPRs_inline)
11193 int i;
11194 char rname[30];
11195 const char *alloc_rname;
11197 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11198 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11199 alloc_rname = ggc_strdup (rname);
11200 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11201 gen_rtx_SYMBOL_REF (Pmode,
11202 alloc_rname));
11204 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11206 rtx addr, mem;
11207 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11208 GEN_INT (info->fp_save_offset + 8*i));
11209 mem = gen_rtx_MEM (DFmode, addr);
11210 set_mem_alias_set (mem, rs6000_sr_alias_set);
11212 RTVEC_ELT (p, i+3) =
11213 gen_rtx_SET (VOIDmode,
11214 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11215 mem);
11219 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Write function epilogue.

   Emits any epilogue RTL that the HAVE_epilogue path did not handle,
   then (for AIX) emits the traceback table that AIX debuggers and
   unwinders expect to find after the function body.  SIZE is unused.  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
/* A C compound statement that outputs the assembler code for a thunk
   function, used to implement C++ virtual function calls with
   multiple inheritance.  The thunk acts as a wrapper around a virtual
   function, adjusting the implicit object parameter before handing
   control off to the real function.

   First, emit code to add the integer DELTA to the location that
   contains the incoming first argument.  Assume that this argument
   contains a pointer, and is the one used to pass the `this' pointer
   in C++.  This is the incoming argument *before* the function
   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
   values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
   not touch the return address.  Hence returning from FUNCTION will
   return to whoever called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting
   all of the code for a thunk function; output_function_prologue()
   and output_function_epilogue() are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
   been extracted from it.)  It might possibly be useful on some
   targets, but probably not.

   If you do not define this macro, the target-independent code in the
   C++ frontend will generate a less efficient heavyweight thunk that
   calls FUNCTION instead of jumping to it.  The generic approach does
   not support varargs.  */

static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  rtx this, insn, funexp;

  /* Pretend we have completed register allocation; the RTL emitted
     below uses hard registers directly.  Undone at the end.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.
     Computes: this += *(*this + vcall_offset), using r12 as scratch.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);

  /* SYMBOL_REF_FLAG marks a symbol callable with a local (short)
     branch; set it only for same-file functions without a `longcall'
     attribute (or with an overriding `shortcall' attribute).  */
  SYMBOL_REF_FLAG (funexp) = 0;
  if (current_file_function_operand (funexp, VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    SYMBOL_REF_FLAG (funexp) = 1;

  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (flag_pic)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
			     gen_rtvec (4,
					gen_rtx_CALL (VOIDmode,
						      funexp, const0_rtx),
					gen_rtx_USE (VOIDmode, const0_rtx),
					gen_rtx_USE (VOIDmode,
						     gen_rtx_REG (SImode,
								  LINK_REGISTER_REGNUM)),
					gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  reload_completed = 0;
  no_new_pseudos = 0;
}
11629 /* A quick summary of the various types of 'constant-pool tables'
11630 under PowerPC:
11632 Target Flags Name One table per
11633 AIX (none) AIX TOC object file
11634 AIX -mfull-toc AIX TOC object file
11635 AIX -mminimal-toc AIX minimal TOC translation unit
11636 SVR4/EABI (none) SVR4 SDATA object file
11637 SVR4/EABI -fpic SVR4 pic object file
11638 SVR4/EABI -fPIC SVR4 PIC translation unit
11639 SVR4/EABI -mrelocatable EABI TOC function
11640 SVR4/EABI -maix AIX TOC object file
11641 SVR4/EABI -maix -mminimal-toc
11642 AIX minimal TOC translation unit
11644 Name Reg. Set by entries contains:
11645 made by addrs? fp? sum?
11647 AIX TOC 2 crt0 as Y option option
11648 AIX minimal TOC 30 prolog gcc Y Y option
11649 SVR4 SDATA 13 crt0 gcc N Y N
11650 SVR4 pic 30 prolog ld Y not yet N
11651 SVR4 PIC 30 prolog gcc Y option option
11652 EABI TOC 30 prolog gcc Y option option
/* Hash functions for the hash table.  */

/* Compute a hash value for rtx K, recursing into its operands.
   Used (via toc_hash_function) to detect duplicate TOC entries.
   The hash mixes in the rtx code and mode, then each operand
   according to the rtx format string.  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* VOIDmode CONST_DOUBLE is an integer pair; hash only the first
	 two slots via the generic loop below.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the first three slots (chain pointers etc.) and hash the
	 remaining operands.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Generic case: fold in every operand from FIDX up, dispatching on
     its rtx format character.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    /* HOST_WIDE_INT is wider than unsigned: fold it in one
	       unsigned-sized chunk at a time.  */
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      default:
	abort ();
      }

  return result;
}
11730 static unsigned
11731 toc_hash_function (hash_entry)
11732 const void * hash_entry;
11734 const struct toc_hash_struct *thc =
11735 (const struct toc_hash_struct *) hash_entry;
11736 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11739 /* Compare H1 and H2 for equivalence. */
11741 static int
11742 toc_hash_eq (h1, h2)
11743 const void * h1;
11744 const void * h2;
11746 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11747 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11749 if (((const struct toc_hash_struct *) h1)->key_mode
11750 != ((const struct toc_hash_struct *) h2)->key_mode)
11751 return 0;
11753 return rtx_equal_p (r1, r2);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   NAME is a NUL-terminated symbol name; the macro is true when it
   carries one of the vtable-related prefixes: "_vt." (old g++ ABI)
   or "_ZTV"/"_ZTT"/"_ZTC" (Itanium C++ ABI vtable, VTT, and
   construction vtable).

   Fixed: the expansion previously referenced the lowercase `name',
   silently binding to whatever local variable of that name existed at
   the expansion site instead of the macro argument NAME.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output to FILE the assembler name for the symbol X, bypassing any
   section-relative encoding for vtable symbols.  */

void
rs6000_output_symbol_ref (file, x)
     FILE *file;
     rtx x;
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    assemble_name (file, name);
}
/* Output a TOC entry.  We derive the entry name from what is being
   written.

   FILE is the assembler output stream, X the constant (or symbolic
   address) being placed in the TOC, LABELNO the LC label number
   assigned to it, and MODE the mode it was entered under.  Duplicate
   entries are coalesced via toc_hash_table by emitting a `.set' of
   the new label to the old one.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend into HIGH by hand.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Symbolic constant: split a CONST (PLUS sym off) into BASE and
     OFFSET.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a negative offset as .N and a positive one as .P so the
	 entry name stays a valid assembler identifier.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12087 /* Output an assembler pseudo-op to write an ASCII string of N characters
12088 starting at P to FILE.
12090 On the RS/6000, we have to do this using the .byte operation and
12091 write out special characters outside the quoted string.
12092 Also, the assembler is broken; very long strings are truncated,
12093 so we must artificially break them up early. */
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  int pos;
  int quoted_len = 0;			/* chars emitted in the open quoted string */
  const char *open_string = "\t.byte \"";  /* text before first quoted char */
  const char *open_decimal = "\t.byte ";   /* text before first decimal byte */
  const char *pending_close = NULL;	/* text needed to finish current line */

  for (pos = 0; pos < n; pos++)
    {
      char ch = *p++;

      if (ch < ' ' || ch >= 0177)
	{
	  /* Non-printing character: emit it as a decimal .byte value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  quoted_len = 0;
	  continue;
	}

      /* Printing character: emit it inside a quoted string.  */
      if (open_string)
	fputs (open_string, file);
      putc (ch, file);

      /* A quote inside the string is escaped by doubling it.  */
      if (ch == '"')
	{
	  putc (ch, file);
	  quoted_len++;
	}

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      pending_close = "\"\n";
      quoted_len++;

      /* The assembler truncates very long strings, so break them up
	 artificially well before that limit.  */
      if (quoted_len >= 512)
	{
	  fputs (pending_close, file);

	  open_string = "\t.byte \"";
	  open_decimal = "\t.byte ";
	  pending_close = NULL;
	  quoted_len = 0;
	}
    }

  /* Close an open quoted string, ending the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
12157 /* Generate a unique section name for FILENAME for a section type
12158 represented by SECTION_DESC. Output goes into BUF.
12160 SECTION_DESC can be any string, as long as it is different for each
12161 possible section type.
12163 We name the section in the same manner as xlc. The name begins with an
12164 underscore followed by the filename (after stripping any leading directory
12165 names) with the last period replaced by the string SECTION_DESC. If
12166 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12167 the name. */
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Locate the start of the basename, and the last period WITHIN the
     basename.  A period inside a leading directory component must not
     count as "the last period" of the stripped filename, so forget any
     period seen before a subsequent slash.  (Previously a name such as
     "dir.x/foo" silently dropped SECTION_DESC altogether.)  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* "_" + basename + section_desc + NUL; non-alphanumerics are dropped
     below, so this is an upper bound.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  /* Copy the basename, substituting SECTION_DESC for the last period
     and everything after it, and skipping non-alphanumerics.  */
  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  /* No period in the basename: append SECTION_DESC to the end.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
12213 /* Emit profile function. */
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No per-call-site counter word: just call mcount.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass the address of the "LP<labelno>" counter label to mcount.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
  /* Other ABIs emit their profiling code in output_function_profiler.  */
}
12260 /* Write function profiler code. */
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;	/* LR save slot offset; 4 for the V.4 ABI below */

  /* BUF gets the "LP<labelno>" per-call-site counter label.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      /* Load the counter-label address into r0/r12 and save LR, with
	 the addressing sequence depending on the PIC level.  The
	 {old|new} braces in the templates select POWER vs PowerPC
	 mnemonics via asm_fprintf.  */
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: address the label through the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address with a high/low pair.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Protect the static chain register around the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
/* Target hook: nonzero tells the scheduler to use the DFA-based
   pipeline hazard recognizer.  */
static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
12352 /* Power4 load update and store update instructions are cracked into a
12353 load or store and an integer insn which are executed in the same cycle.
12354 Branches have their own dispatch slot which does not count against the
12355 GCC issue rate, but it changes the program flow so there are no other
12356 instructions to issue in this cycle. */
12358 static int
12359 rs6000_variable_issue (stream, verbose, insn, more)
12360 FILE *stream ATTRIBUTE_UNUSED;
12361 int verbose ATTRIBUTE_UNUSED;
12362 rtx insn;
12363 int more;
12365 if (GET_CODE (PATTERN (insn)) == USE
12366 || GET_CODE (PATTERN (insn)) == CLOBBER)
12367 return more;
12369 if (rs6000_cpu == PROCESSOR_POWER4)
12371 enum attr_type type = get_attr_type (insn);
12372 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12373 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
12374 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX)
12375 return 0;
12376 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12377 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12378 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR)
12379 return more > 2 ? more - 2 : 0;
12382 return more - 1;
12385 /* Adjust the cost of a scheduling dependency. Return the new cost of
12386 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12388 static int
12389 rs6000_adjust_cost (insn, link, dep_insn, cost)
12390 rtx insn;
12391 rtx link;
12392 rtx dep_insn ATTRIBUTE_UNUSED;
12393 int cost;
12395 if (! recog_memoized (insn))
12396 return 0;
12398 if (REG_NOTE_KIND (link) != 0)
12399 return 0;
12401 if (REG_NOTE_KIND (link) == 0)
12403 /* Data dependency; DEP_INSN writes a register that INSN reads
12404 some cycles later. */
12405 switch (get_attr_type (insn))
12407 case TYPE_JMPREG:
12408 /* Tell the first scheduling pass about the latency between
12409 a mtctr and bctr (and mtlr and br/blr). The first
12410 scheduling pass will not know about this latency since
12411 the mtctr instruction, which has the latency associated
12412 to it, will be generated by reload. */
12413 return TARGET_POWER ? 5 : 4;
12414 case TYPE_BRANCH:
12415 /* Leave some extra cycles between a compare and its
12416 dependent branch, to inhibit expensive mispredicts. */
12417 if ((rs6000_cpu_attr == CPU_PPC603
12418 || rs6000_cpu_attr == CPU_PPC604
12419 || rs6000_cpu_attr == CPU_PPC604E
12420 || rs6000_cpu_attr == CPU_PPC620
12421 || rs6000_cpu_attr == CPU_PPC630
12422 || rs6000_cpu_attr == CPU_PPC750
12423 || rs6000_cpu_attr == CPU_PPC7400
12424 || rs6000_cpu_attr == CPU_PPC7450
12425 || rs6000_cpu_attr == CPU_POWER4)
12426 && recog_memoized (dep_insn)
12427 && (INSN_CODE (dep_insn) >= 0)
12428 && (get_attr_type (dep_insn) == TYPE_CMP
12429 || get_attr_type (dep_insn) == TYPE_COMPARE
12430 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12431 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12432 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12433 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12434 return cost + 2;
12435 default:
12436 break;
12438 /* Fall out to return default cost. */
12441 return cost;
12444 /* A C statement (sans semicolon) to update the integer scheduling
12445 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12446 INSN earlier, increase the priority to execute INSN later. Do not
12447 define this macro if you do not need to adjust the scheduling
12448 priorities of insns. */
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* NOTE(review): the adjustment below is disabled (including a debug
     fprintf to stderr); the hook currently returns PRIORITY unchanged.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12488 /* Return how many instructions the machine can issue per cycle. */
static int
rs6000_issue_rate ()
{
  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
  if (!reload_completed)
    return 1;

  /* Per-CPU dispatch width for the second scheduling pass.  */
  switch (rs6000_cpu_attr) {
  case CPU_RIOS1:  /* ? */
  case CPU_RS64A:
  case CPU_PPC601: /* ? */
  case CPU_PPC7450:
    return 3;
  case CPU_PPC440:
  case CPU_PPC603:
  case CPU_PPC750:
  case CPU_PPC7400:
    return 2;
  case CPU_RIOS2:
  case CPU_PPC604:
  case CPU_PPC604E:
  case CPU_PPC620:
  case CPU_PPC630:
  case CPU_POWER4:
    return 4;
  default:
    /* Conservative default for unknown CPUs.  */
    return 1;
  }
}
12521 /* Length in units of the trampoline for entering a nested function. */
12524 rs6000_trampoline_size ()
12526 int ret = 0;
12528 switch (DEFAULT_ABI)
12530 default:
12531 abort ();
12533 case ABI_AIX:
12534 ret = (TARGET_32BIT) ? 12 : 24;
12535 break;
12537 case ABI_DARWIN:
12538 case ABI_V4:
12539 case ABI_AIX_NODESC:
12540 ret = (TARGET_32BIT) ? 40 : 48;
12541 break;
12544 return ret;
12547 /* Emit RTL insns to initialize the variable parts of a trampoline.
12548 FNADDR is an RTX for the address of the function's pure code.
12549 CXT is an RTX for the static chain value for the function. */
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* bytes per pointer-sized slot */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the target's entry address and TOC from its descriptor,
	   then store them plus the static chain into the trampoline's
	   descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12601 /* Table of valid machine attributes. */
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Both attributes take no arguments, attach to types (function types
     only), and share one handler which merely validates placement.  */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
12611 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12612 struct attribute_spec.handler. */
static tree
rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
     tree *node;
     tree name;
     tree args ATTRIBUTE_UNUSED;
     int flags ATTRIBUTE_UNUSED;
     bool *no_add_attrs;
{
  /* NOTE(review): FIELD_DECL and TYPE_DECL are accepted without warning
     here — presumably so the attribute can reach a function type through
     a typedef or member declaration; confirm against callers.  */
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning ("`%s' attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      /* Reject the attribute on anything else.  */
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
12634 /* Set longcall attributes on all functions declared when
12635 rs6000_default_long_calls is true. */
12636 static void
12637 rs6000_set_default_type_attributes (type)
12638 tree type;
12640 if (rs6000_default_long_calls
12641 && (TREE_CODE (type) == FUNCTION_TYPE
12642 || TREE_CODE (type) == METHOD_TYPE))
12643 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12644 NULL_TREE,
12645 TYPE_ATTRIBUTES (type));
12648 /* Return a reference suitable for calling a function with the
12649 longcall attribute. */
struct rtx_def *
rs6000_longcall_ref (call_ref)
     rtx call_ref;
{
  const char *call_name;
  tree node;

  /* Non-symbolic targets are already indirect enough.  */
  if (GET_CODE (call_ref) != SYMBOL_REF)
    return call_ref;

  /* System V adds '.' to the internal name, so skip them.  */
  call_name = XSTR (call_ref, 0);
  if (*call_name == '.')
    {
      while (*call_name == '.')
	call_name++;

      /* Rebuild the SYMBOL_REF from the stripped, interned name.  */
      node = get_identifier (call_name);
      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
    }

  /* A long call is made through a register.  */
  return force_reg (Pmode, call_ref);
}
12676 #ifdef USING_ELFOS_H
12678 /* A C statement or statements to switch to the appropriate section
12679 for output of RTX in mode MODE. You can assume that RTX is some
12680 kind of constant in RTL. The argument MODE is redundant except in
12681 the case of a `const_int' rtx. Select the section by calling
12682 `text_section' or one of the alternatives for other sections.
12684 Do not define this macro if you put all constants in the read-only
12685 data section. */
static void
rs6000_elf_select_rtx_section (mode, x, align)
     enum machine_mode mode;
     rtx x;
     unsigned HOST_WIDE_INT align;
{
  /* Constants eligible for the TOC go there; anything else uses the
     generic ELF constant-pool section selection.  */
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    toc_section ();
  else
    default_elf_select_rtx_section (mode, x, align);
}
12699 /* A C statement or statements to switch to the appropriate
12700 section for output of DECL. DECL is either a `VAR_DECL' node
12701 or a constant of some sort. RELOC indicates whether forming
12702 the initial value of DECL requires link-time relocations. */
static void
rs6000_elf_select_section (decl, reloc, align)
     tree decl;
     int reloc;
     unsigned HOST_WIDE_INT align;
{
  /* Delegate to the generic ELF routine; the final argument (set for
     PIC or the AIX ABI) selects shared-library-style section choice.  */
  default_elf_select_section_1 (decl, reloc, align,
				flag_pic || DEFAULT_ABI == ABI_AIX);
}
12714 /* A C statement to build up a unique section name, expressed as a
12715 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12716 RELOC indicates whether the initial value of EXP requires
12717 link-time relocations. If you do not define this macro, GCC will use
12718 the symbol name prefixed by `.' as the section name. Note - this
12719 macro can now be called for uninitialized data items as well as
12720 initialized data and functions. */
static void
rs6000_elf_unique_section (decl, reloc)
     tree decl;
     int reloc;
{
  /* Delegate to the generic routine; same shared-library flag as
     rs6000_elf_select_section above.  */
  default_unique_section_1 (decl, reloc,
			    flag_pic || DEFAULT_ABI == ABI_AIX);
}
12732 /* If we are referencing a function that is static or is known to be
12733 in this file, make the SYMBOL_REF special. We can use this to indicate
12734 that we can branch to this function without emitting a no-op after the
12735 call. For real AIX calling sequences, we also replace the
12736 function name with the real name (1 or 2 leading .'s), rather than
12737 the function descriptor name. This saves a lot of overriding code
12738 to read the prefixes. */
static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Only encode on the first call for a given decl.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Locally-bound functions can be branched to directly.  */
      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* Prefix the symbol with leading dot(s) for the AIX calling
	     sequence.  NOTE(review): len1 is always 1 inside this
	     ABI_AIX-guarded block, so str[1] = '.' below is immediately
	     overwritten by the memcpy — harmless but dead.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      /* Pick up an explicit section name, if the decl has one.  */
      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Variables in a known small-data section, or small enough for
	 -G, get an '@' prefix marking them as small-data.  */
      if (name
	  ? ((len == sizeof (".sdata") - 1
	      && strcmp (name, ".sdata") == 0)
	     || (len == sizeof (".sdata2") - 1
		 && strcmp (name, ".sdata2") == 0)
	     || (len == sizeof (".sbss") - 1
		 && strcmp (name, ".sbss") == 0)
	     || (len == sizeof (".sbss2") - 1
		 && strcmp (name, ".sbss2") == 0)
	     || (len == sizeof (".PPC.EMB.sdata0") - 1
		 && strcmp (name, ".PPC.EMB.sdata0") == 0)
	     || (len == sizeof (".PPC.EMB.sbss0") - 1
		 && strcmp (name, ".PPC.EMB.sbss0") == 0))
	  : (size > 0 && size <= g_switch_value))
	{
	  /* NOTE(review): this inner `len' shadows the outer one.  */
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
/* Strip the encoding flag characters ('*' user-label marker, '@'
   small-data marker) prepended to a symbol name, returning a pointer
   to the undecorated name.  */
static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  for (; *str == '*' || *str == '@'; str++)
    ;
  return str;
}
static bool
rs6000_elf_in_small_data_p (decl)
     tree decl;
{
  /* No small data at all without -msdata.  */
  if (rs6000_sdata == SDATA_NONE)
    return false;

  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
    {
      /* An explicit section name decides directly.  */
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sdata2") == 0
	  || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      /* Otherwise go by size against the -G threshold.  */
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      if (size > 0
	  && size <= g_switch_value
	  /* SDATA_DATA places only public variables in small data.  */
	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
	return true;
    }

  return false;
}
12852 #endif /* USING_ELFOS_H */
12855 /* Return a REG that occurs in ADDR with coefficient 1.
12856 ADDR can be effectively incremented by incrementing REG.
12858 r0 is special and we must not select it as an address
12859 register by this routine since our caller will try to
12860 increment the returned register via an "la" instruction. */
struct rtx_def *
find_addr_reg (addr)
     rtx addr;
{
  /* Walk down nested PLUSes, at each level keeping the operand that can
     still contain the register: a non-r0 REG directly, otherwise the
     non-constant operand.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) != 0)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
	       && REGNO (XEXP (addr, 1)) != 0)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	abort ();
    }
  /* r0 is excluded: see the comment above this function.  */
  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
    return addr;
  abort ();
}
/* Report an invalid address RTX as a fatal (ICE-style) error.  */
void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
12893 #if TARGET_MACHO
12895 #if 0
12896 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12897 reference and a constant. */
int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): when the CONST operand is not itself a SYMBOL_REF
	 this assumes it is a PLUS without checking its code, and the
	 mixed ||/&& relies on && binding tighter than ||.  The whole
	 function is currently disabled (#if 0), so left as-is.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
12918 #endif
12920 #ifdef RS6000_LONG_BRANCH
12922 static tree stub_list = 0;
12924 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12925 procedure calls to the linked list. */
void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
{
  /* Prepend a (function_name, label_name) pair to the stub list; the
     source line number rides along as an INTEGER_CST stashed in
     TREE_TYPE of the list node.  */
  tree stub = build_tree_list (function_name, label_name);
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;
}
12939 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12940 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12941 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12943 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12944 handling procedure calls from the linked list and initializes the
12945 linked list. */
void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  /* Stubs are only needed for non-PIC long branches; PIC calls are
     handled elsewhere.  */
  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	/* Emit the stub's label.  */
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	/* Attribute the stub to the call's source line for the debugger.  */
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means "already assembler-visible"; otherwise the
	   user-level name gets the '_' prefix.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the full 32-bit target address into r12 and branch
	   through the count register.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* Reset the list for the next translation unit / function batch.  */
  stub_list = 0;
}
12991 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12992 already there or not. */
/* Return 1 iff FUNCTION_NAME has no stub on the list yet.  */
int
no_previous_def (function_name)
     tree function_name;
{
  tree stub;
  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
    if (function_name == STUB_FUNCTION_NAME (stub))
      return 0;
  return 1;
}
13005 /* GET_PREV_LABEL gets the label name from the previous definition of
13006 the function. */
tree
get_prev_label (function_name)
     tree function_name;
{
  /* Return the stub label previously recorded for FUNCTION_NAME, or 0
     if none exists (see no_previous_def).  */
  tree stub;
  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
    if (function_name == STUB_FUNCTION_NAME (stub))
      return STUB_LABEL_NAME (stub);
  return 0;
}
13019 /* INSN is either a function call or a millicode call. It may have an
13020 unconditional jump in its delay slot.
13022 CALL_DEST is the routine we are calling. */
char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  /* Long direct calls go through a compiler-generated stub ("jbsr");
     everything else is a plain "bl".  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this function: make a fresh stub label and
	     record it, tagged with the nearest preceding NOTE's line.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  /* Drop the '*' "no user prefix" marker, if present.  */
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s bounds the label so BUF (256 bytes) cannot overflow.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
13064 #endif /* RS6000_LONG_BRANCH */
/* Build in BUF a local label of the form "L<N>$<SYMBOL>", preserving a
   leading quote from SYMBOL or adding surrounding quotes when the name
   needs them.  Note: the LENGTH parameter is accepted but unused.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13085 /* Generate PIC and indirect symbol stubs. */
/* Generate PIC and indirect symbol stubs.  */
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;	/* NOTE(review): incremented but otherwise unused */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  /* Derive the decorated symbol, lazy-pointer, and local-label names.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: compute the lazy pointer's address relative to the
	 bcl-established base in r11, then jump through it via ctr.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    /* Non-PIC stubs are not implemented here.  */
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer, initially bound to the dyld helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
13143 /* Legitimize PIC addresses. If the address is already
13144 position-independent, we return ORIG. Newly generated
13145 position-independent addresses go into a reg. This is REG if non
13146 zero, otherwise we allocate register(s) as necessary. */
13148 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch register if the caller gave none and we still may.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already PIC-relative: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize the two addends recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    /* Fits in a 16-bit displacement: fold it in directly.  */
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we cannot make new pseudos; fall back to
		 a constant-pool reference.  */
	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
13198 /* This is just a placeholder to make linking work without having to
13199 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13200 ever needed for Darwin (not too likely!) this would have to get a
13201 real definition. */
void
toc_section ()
{
  /* Deliberately empty: Darwin has no TOC (see comment above).  */
}
13208 #endif /* TARGET_MACHO */
13210 #if TARGET_ELF
static unsigned int
rs6000_elf_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int flags
    = default_section_type_flags_1 (decl, name, reloc,
				    flag_pic || DEFAULT_ABI == ABI_AIX);

  /* -mrelocatable fixes up section contents at load time, so even
     nominally read-only sections must be writable.  */
  if (TARGET_RELOCATABLE)
    flags |= SECTION_WRITE;

  return flags;
}
13227 /* Record an element in the table of global constructors. SYMBOL is
13228 a SYMBOL_REF of the function to be called; PRIORITY is a number
13229 between 0 and MAX_INIT_PRIORITY.
13231 This differs from default_named_section_asm_out_constructor in
13232 that we have special handling for -mrelocatable. */
static void
rs6000_elf_asm_out_constructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".ctors";
  char buf[16];		/* ".ctors." + 5 digits + NUL fits */

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      /* Emit a @fixup reference so -mrelocatable startup code can
	 relocate this entry at load time.  */
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
13265 static void
13266 rs6000_elf_asm_out_destructor (symbol, priority)
13267 rtx symbol;
13268 int priority;
13270 const char *section = ".dtors";
13271 char buf[16];
13273 if (priority != DEFAULT_INIT_PRIORITY)
13275 sprintf (buf, ".dtors.%.5u",
13276 /* Invert the numbering so the linker puts us in the proper
13277 order; constructors are run from right to left, and the
13278 linker sorts in increasing order. */
13279 MAX_INIT_PRIORITY - priority);
13280 section = buf;
13283 named_section_flags (section, SECTION_WRITE);
13284 assemble_align (POINTER_SIZE);
13286 if (TARGET_RELOCATABLE)
13288 fputs ("\t.long (", asm_out_file);
13289 output_addr_const (asm_out_file, symbol);
13290 fputs (")@fixup\n", asm_out_file);
13292 else
13293 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13295 #endif
13297 #if TARGET_XCOFF
/* Emit the assembly directive that makes NAME globally visible:
   GLOBAL_ASM_OP followed by the base symbol name (any encoding
   prefix stripped by RS6000_OUTPUT_BASENAME).  */
static void
rs6000_xcoff_asm_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
13308 static void
13309 rs6000_xcoff_asm_named_section (name, flags)
13310 const char *name;
13311 unsigned int flags;
13313 int smclass;
13314 static const char * const suffix[3] = { "PR", "RO", "RW" };
13316 if (flags & SECTION_CODE)
13317 smclass = 0;
13318 else if (flags & SECTION_WRITE)
13319 smclass = 2;
13320 else
13321 smclass = 1;
13323 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13324 (flags & SECTION_CODE) ? "." : "",
13325 name, suffix[smclass], flags & SECTION_ENTSIZE);
13328 static void
13329 rs6000_xcoff_select_section (decl, reloc, align)
13330 tree decl;
13331 int reloc;
13332 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13334 if (decl_readonly_section_1 (decl, reloc, 1))
13336 if (TREE_PUBLIC (decl))
13337 read_only_data_section ();
13338 else
13339 read_only_private_data_section ();
13341 else
13343 if (TREE_PUBLIC (decl))
13344 data_section ();
13345 else
13346 private_data_section ();
13350 static void
13351 rs6000_xcoff_unique_section (decl, reloc)
13352 tree decl;
13353 int reloc ATTRIBUTE_UNUSED;
13355 const char *name;
13357 /* Use select_section for private and uninitialized data. */
13358 if (!TREE_PUBLIC (decl)
13359 || DECL_COMMON (decl)
13360 || DECL_INITIAL (decl) == NULL_TREE
13361 || DECL_INITIAL (decl) == error_mark_node
13362 || (flag_zero_initialized_in_bss
13363 && initializer_zerop (DECL_INITIAL (decl))))
13364 return;
13366 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13367 name = (*targetm.strip_name_encoding) (name);
13368 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13371 /* Select section for constant in constant pool.
13373 On RS/6000, all constants are in the private read-only data area.
13374 However, if this is being placed in the TOC it must be output as a
13375 toc entry. */
13377 static void
13378 rs6000_xcoff_select_rtx_section (mode, x, align)
13379 enum machine_mode mode;
13380 rtx x;
13381 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13383 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13384 toc_section ();
13385 else
13386 read_only_private_data_section ();
13389 /* Remove any trailing [DS] or the like from the symbol name. */
/* Remove any trailing [DS] or the like from the symbol name NAME,
   after skipping a leading '*'.  Returns a GC-allocated copy when a
   suffix is stripped, otherwise NAME itself (possibly advanced past
   the '*').  The caller must not free the result.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* A well-formed encoded name ends in a four-character "[XX]" suffix.
     Guard the length so an empty name cannot read name[-1] (undefined
     behavior) and a too-short bracketed name cannot underflow LEN - 4.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13405 /* Section attributes. AIX is always PIC. */
13407 static unsigned int
13408 rs6000_xcoff_section_type_flags (decl, name, reloc)
13409 tree decl;
13410 const char *name;
13411 int reloc;
13413 unsigned int align;
13414 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13416 /* Align to at least UNIT size. */
13417 if (flags & SECTION_CODE)
13418 align = MIN_UNITS_PER_WORD;
13419 else
13420 /* Increase alignment of large objects if not already stricter. */
13421 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13422 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13423 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13425 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13428 #endif /* TARGET_XCOFF */
13430 /* Note that this is also used for PPC64 Linux. */
/* Set SYMBOL_REF_FLAG on a function's symbol when the target hook says
   it binds locally; NOTE(review): downstream users of this flag
   presumably emit direct rather than indirect calls -- confirm at the
   flag's consumers.  */
static void
rs6000_xcoff_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  if (TREE_CODE (decl) == FUNCTION_DECL
      && (*targetm.binds_local_p) (decl))
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
}
13442 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13443 PIC, use private copy of flag_pic. Darwin does not support overriding
13444 functions at dynamic-link time. */
13446 static bool
13447 rs6000_binds_local_p (decl)
13448 tree decl;
13450 return default_binds_local_p_1 (decl,
13451 DEFAULT_ABI == ABI_DARWIN ? 0 : flag_pic || rs6000_flag_pic);
13454 /* Compute a (partial) cost for rtx X. Return true if the complete
13455 cost has been computed, and false if subexpressions should be
13456 scanned. In either case, *TOTAL contains the cost result. */
static bool
rs6000_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code ATTRIBUTE_UNUSED;
     int *total;
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* One insn when the addend fits in a signed 16-bit field or has a
	 zero low halfword; otherwise two insns are needed to build the
	 constant.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* Two insns when the constant has nonzero bits in both halfwords;
	 otherwise one.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* Per-processor multiply latencies; several cores are faster when
	 one operand is a small (-256..255) constant.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_RS64A:
	  /* DImode multiplies are markedly slower on RS64A.  */
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	case PROCESSOR_POWER4:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Signed division by an exact power of two costs two insns
	 regardless of processor.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Per-processor divide latencies.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	case PROCESSOR_POWER4:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)) */
      *total = 5;
      return true;

    default:
      /* Unknown code: let the caller scan subexpressions.  */
      return false;
    }
}
13658 /* A C expression returning the cost of moving data from a register of class
13659 CLASS1 to one of CLASS2. */
13662 rs6000_register_move_cost (mode, from, to)
13663 enum machine_mode mode;
13664 enum reg_class from, to;
13666 /* Moves from/to GENERAL_REGS. */
13667 if (reg_classes_intersect_p (to, GENERAL_REGS)
13668 || reg_classes_intersect_p (from, GENERAL_REGS))
13670 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13671 from = to;
13673 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13674 return (rs6000_memory_move_cost (mode, from, 0)
13675 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13677 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13678 else if (from == CR_REGS)
13679 return 4;
13681 else
13682 /* A move will cost one instruction per GPR moved. */
13683 return 2 * HARD_REGNO_NREGS (0, mode);
13686 /* Moving between two similar registers is just one instruction. */
13687 else if (reg_classes_intersect_p (to, from))
13688 return mode == TFmode ? 4 : 2;
13690 /* Everything else has to go through GENERAL_REGS. */
13691 else
13692 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13693 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
/* A C expression returning the cost of moving data of MODE from a register to
   or from memory.  */
13700 rs6000_memory_move_cost (mode, class, in)
13701 enum machine_mode mode;
13702 enum reg_class class;
13703 int in ATTRIBUTE_UNUSED;
13705 if (reg_classes_intersect_p (class, GENERAL_REGS))
13706 return 4 * HARD_REGNO_NREGS (0, mode);
13707 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13708 return 4 * HARD_REGNO_NREGS (32, mode);
13709 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13710 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13711 else
13712 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13715 /* Return true if TYPE is of type __ev64_opaque__. */
13717 static bool
13718 is_ev64_opaque_type (type)
13719 tree type;
13721 return (TARGET_SPE
13722 && TREE_CODE (type) == VECTOR_TYPE
13723 && TYPE_NAME (type)
13724 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13725 && DECL_NAME (TYPE_NAME (type))
13726 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13727 "__ev64_opaque__") == 0);
/* Return a PARALLEL describing the DWARF span of REG for SPE vector
   modes, or NULL_RTX for anything else.  */
static rtx
rs6000_dwarf_register_span (reg)
     rtx reg;
{
  unsigned regno;

  if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
    return NULL_RTX;

  regno = REGNO (reg);

  /* The duality of the SPE register size wreaks all kinds of havoc.
     This is a way of distinguishing r0 in 32-bits from r0 in
     64-bits.  */
  return
    gen_rtx_PARALLEL (VOIDmode,
		      gen_rtvec (2,
				 gen_rtx_REG (SImode, regno),
				 /* Who, where, what?  1200?  This
				    will get changed to a sane value
				    when the SPE ABI finalizes.  */
				 gen_rtx_REG (SImode, regno + 1200)));
}
13754 #include "gt-rs6000.h"