FSF GCC merge 02/23/03
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blob500af598e99b9dd4b1363d81298b86a4cf1f9d17
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
56 #endif
58 #define min(A,B) ((A) < (B) ? (A) : (B))
59 #define max(A,B) ((A) > (B) ? (A) : (B))
61 /* Target cpu type */
63 enum processor_type rs6000_cpu;
64 struct rs6000_cpu_select rs6000_select[3] =
66 /* switch name, tune arch */
67 { (const char *)0, "--with-cpu=", 1, 1 },
68 { (const char *)0, "-mcpu=", 1, 1 },
69 { (const char *)0, "-mtune=", 1, 0 },
72 /* Size of long double */
73 const char *rs6000_long_double_size_string;
74 int rs6000_long_double_type_size;
76 /* Whether -mabi=altivec has appeared */
77 int rs6000_altivec_abi;
79 /* Whether VRSAVE instructions should be generated. */
80 int rs6000_altivec_vrsave;
82 /* String from -mvrsave= option. */
83 const char *rs6000_altivec_vrsave_string;
85 /* Nonzero if we want SPE ABI extensions. */
86 int rs6000_spe_abi;
88 /* Whether isel instructions should be generated. */
89 int rs6000_isel;
91 /* Nonzero if we have FPRs. */
92 int rs6000_fprs = 1;
94 /* String from -misel=. */
95 const char *rs6000_isel_string;
97 /* Set to nonzero once AIX common-mode calls have been defined. */
98 static GTY(()) int common_mode_defined;
100 /* Private copy of original value of flag_pic for ABI_AIX. */
101 static int rs6000_flag_pic;
103 /* Save information from a "cmpxx" operation until the branch or scc is
104 emitted. */
105 rtx rs6000_compare_op0, rs6000_compare_op1;
106 int rs6000_compare_fp_p;
108 /* Label number of label created for -mrelocatable, to call to so we can
109 get the address of the GOT section */
110 int rs6000_pic_labelno;
112 #ifdef USING_ELFOS_H
113 /* Which abi to adhere to */
114 const char *rs6000_abi_name = RS6000_ABI_NAME;
116 /* Semantics of the small data area */
117 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
119 /* Which small data model to use */
120 const char *rs6000_sdata_name = (char *)0;
122 /* Counter for labels which are to be placed in .fixup. */
123 int fixuplabelno = 0;
124 #endif
126 /* ABI enumeration available for subtarget to use. */
127 enum rs6000_abi rs6000_current_abi;
129 /* ABI string from -mabi= option. */
130 const char *rs6000_abi_string;
132 /* Debug flags */
133 const char *rs6000_debug_name;
134 int rs6000_debug_stack; /* debug stack applications */
135 int rs6000_debug_arg; /* debug argument handling */
137 const char *rs6000_traceback_name;
138 static enum {
139 traceback_default = 0,
140 traceback_none,
141 traceback_part,
142 traceback_full
143 } rs6000_traceback;
145 /* Flag to say the TOC is initialized */
146 int toc_initialized;
147 char toc_label_name[10];
149 /* Alias set for saves and restores from the rs6000 stack. */
150 static int rs6000_sr_alias_set;
152 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
153 The only place that looks at this is rs6000_set_default_type_attributes;
154 everywhere else should rely on the presence or absence of a longcall
155 attribute on the function declaration. */
156 int rs6000_default_long_calls;
157 const char *rs6000_longcall_switch;
159 struct builtin_description
161 /* mask is not const because we're going to alter it below. This
162 nonsense will go away when we rewrite the -march infrastructure
163 to give us more target flag bits. */
164 unsigned int mask;
165 const enum insn_code icode;
166 const char *const name;
167 const enum rs6000_builtins code;
170 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
171 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
172 static void validate_condition_mode
173 PARAMS ((enum rtx_code, enum machine_mode));
174 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
175 static void rs6000_maybe_dead PARAMS ((rtx));
176 static void rs6000_emit_stack_tie PARAMS ((void));
177 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
178 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
179 unsigned int, int, int));
180 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
181 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
182 static unsigned rs6000_hash_constant PARAMS ((rtx));
183 static unsigned toc_hash_function PARAMS ((const void *));
184 static int toc_hash_eq PARAMS ((const void *, const void *));
185 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
186 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
187 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility PARAMS ((tree, int));
190 #endif
191 static int rs6000_ra_ever_killed PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
193 const struct attribute_spec rs6000_attribute_table[];
194 static void rs6000_set_default_type_attributes PARAMS ((tree));
195 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
196 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
197 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
198 HOST_WIDE_INT, tree));
199 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
200 HOST_WIDE_INT, HOST_WIDE_INT));
201 #if TARGET_ELF
202 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
203 int));
204 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
205 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
206 static void rs6000_elf_select_section PARAMS ((tree, int,
207 unsigned HOST_WIDE_INT));
208 static void rs6000_elf_unique_section PARAMS ((tree, int));
209 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
210 unsigned HOST_WIDE_INT));
211 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
212 ATTRIBUTE_UNUSED;
213 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
214 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
215 #endif
216 #if TARGET_XCOFF
217 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
218 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
219 static void rs6000_xcoff_select_section PARAMS ((tree, int,
220 unsigned HOST_WIDE_INT));
221 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
222 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
223 unsigned HOST_WIDE_INT));
224 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
225 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
226 #endif
227 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
228 ATTRIBUTE_UNUSED;
229 static bool rs6000_binds_local_p PARAMS ((tree));
230 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
231 static int rs6000_multipass_dfa_lookahead PARAMS ((void));
232 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
233 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
234 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
235 static int rs6000_adjust_priority PARAMS ((rtx, int));
236 static int rs6000_issue_rate PARAMS ((void));
238 static void rs6000_init_builtins PARAMS ((void));
239 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
240 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
241 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
242 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
243 static void altivec_init_builtins PARAMS ((void));
244 static void rs6000_common_init_builtins PARAMS ((void));
246 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
247 int, enum rs6000_builtins,
248 enum rs6000_builtins));
249 static void spe_init_builtins PARAMS ((void));
250 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
251 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
252 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
253 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
255 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
256 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
257 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
258 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
259 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
260 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
261 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
262 static void rs6000_parse_abi_options PARAMS ((void));
263 static void rs6000_parse_vrsave_option PARAMS ((void));
264 static void rs6000_parse_isel_option PARAMS ((void));
265 static int first_altivec_reg_to_save PARAMS ((void));
266 static unsigned int compute_vrsave_mask PARAMS ((void));
267 static void is_altivec_return_reg PARAMS ((rtx, void *));
268 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
269 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
270 static int easy_vector_constant PARAMS ((rtx));
271 static int is_ev64_opaque_type PARAMS ((tree));
272 static bool rs6000_spe_vector_types_compatible PARAMS ((tree, tree));
274 /* Hash table stuff for keeping track of TOC entries. */
276 struct toc_hash_struct GTY(())
278 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
279 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
280 rtx key;
281 enum machine_mode key_mode;
282 int labelno;
285 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
287 /* Default register names. */
288 char rs6000_reg_names[][8] =
290 "0", "1", "2", "3", "4", "5", "6", "7",
291 "8", "9", "10", "11", "12", "13", "14", "15",
292 "16", "17", "18", "19", "20", "21", "22", "23",
293 "24", "25", "26", "27", "28", "29", "30", "31",
294 "0", "1", "2", "3", "4", "5", "6", "7",
295 "8", "9", "10", "11", "12", "13", "14", "15",
296 "16", "17", "18", "19", "20", "21", "22", "23",
297 "24", "25", "26", "27", "28", "29", "30", "31",
298 "mq", "lr", "ctr","ap",
299 "0", "1", "2", "3", "4", "5", "6", "7",
300 "xer",
301 /* AltiVec registers. */
302 "0", "1", "2", "3", "4", "5", "6", "7",
303 "8", "9", "10", "11", "12", "13", "14", "15",
304 "16", "17", "18", "19", "20", "21", "22", "23",
305 "24", "25", "26", "27", "28", "29", "30", "31",
306 "vrsave", "vscr",
307 /* SPE registers. */
308 "spe_acc", "spefscr"
311 #ifdef TARGET_REGNAMES
312 static const char alt_reg_names[][8] =
314 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
315 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
316 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
317 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
318 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
319 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
320 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
321 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
322 "mq", "lr", "ctr", "ap",
323 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
324 "xer",
325 /* AltiVec registers. */
326 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
327 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
328 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
329 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
330 "vrsave", "vscr",
331 /* SPE registers. */
332 "spe_acc", "spefscr"
334 #endif
336 #ifndef MASK_STRICT_ALIGN
337 #define MASK_STRICT_ALIGN 0
338 #endif
340 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
341 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
343 /* Initialize the GCC target structure. */
344 #undef TARGET_ATTRIBUTE_TABLE
345 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
346 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
347 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
349 #undef TARGET_ASM_ALIGNED_DI_OP
350 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
352 /* Default unaligned ops are only provided for ELF. Find the ops needed
353 for non-ELF systems. */
354 #ifndef OBJECT_FORMAT_ELF
355 #if TARGET_XCOFF
356 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
357 64-bit targets. */
358 #undef TARGET_ASM_UNALIGNED_HI_OP
359 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
360 #undef TARGET_ASM_UNALIGNED_SI_OP
361 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
362 #undef TARGET_ASM_UNALIGNED_DI_OP
363 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
364 #else
365 /* For Darwin. */
366 #undef TARGET_ASM_UNALIGNED_HI_OP
367 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
368 #undef TARGET_ASM_UNALIGNED_SI_OP
369 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
370 #endif
371 #endif
373 /* This hook deals with fixups for relocatable code and DI-mode objects
374 in 64-bit code. */
375 #undef TARGET_ASM_INTEGER
376 #define TARGET_ASM_INTEGER rs6000_assemble_integer
378 #ifdef HAVE_GAS_HIDDEN
379 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
380 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
381 #endif
383 #undef TARGET_ASM_FUNCTION_PROLOGUE
384 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
385 #undef TARGET_ASM_FUNCTION_EPILOGUE
386 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
388 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
389 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
390 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
391 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_multipass_dfa_lookahead
392 #undef TARGET_SCHED_VARIABLE_ISSUE
393 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
395 #undef TARGET_SCHED_ISSUE_RATE
396 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
397 #undef TARGET_SCHED_ADJUST_COST
398 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
399 #undef TARGET_SCHED_ADJUST_PRIORITY
400 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
402 #undef TARGET_INIT_BUILTINS
403 #define TARGET_INIT_BUILTINS rs6000_init_builtins
405 #undef TARGET_EXPAND_BUILTIN
406 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
408 #undef TARGET_BINDS_LOCAL_P
409 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
411 #undef TARGET_ASM_OUTPUT_MI_THUNK
412 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
414 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
415 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
417 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
418 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
420 #undef TARGET_RTX_COSTS
421 #define TARGET_RTX_COSTS rs6000_rtx_costs
422 #undef TARGET_ADDRESS_COST
423 #define TARGET_ADDRESS_COST hook_int_rtx_0
425 #undef TARGET_VECTOR_TYPES_COMPATIBLE
426 #define TARGET_VECTOR_TYPES_COMPATIBLE rs6000_spe_vector_types_compatible
428 struct gcc_target targetm = TARGET_INITIALIZER;
430 /* Override command line options. Mostly we process the processor
431 type and sometimes adjust other TARGET_ options. */
433 void
434 rs6000_override_options (default_cpu)
435 const char *default_cpu;
437 size_t i, j;
438 struct rs6000_cpu_select *ptr;
440 /* Simplify the entries below by making a mask for any POWER
441 variant and any PowerPC variant. */
443 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
444 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
445 | MASK_PPC_GFXOPT | MASK_POWERPC64)
446 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
448 static struct ptt
450 const char *const name; /* Canonical processor name. */
451 const enum processor_type processor; /* Processor type enum value. */
452 const int target_enable; /* Target flags to enable. */
453 const int target_disable; /* Target flags to disable. */
454 } const processor_target_table[]
455 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
456 POWER_MASKS | POWERPC_MASKS},
457 {"power", PROCESSOR_POWER,
458 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
459 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
460 {"power2", PROCESSOR_POWER,
461 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
462 POWERPC_MASKS | MASK_NEW_MNEMONICS},
463 {"power3", PROCESSOR_PPC630,
464 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
465 POWER_MASKS | MASK_PPC_GPOPT},
466 {"power4", PROCESSOR_POWER4,
467 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
468 POWER_MASKS | MASK_PPC_GPOPT},
469 {"powerpc", PROCESSOR_POWERPC,
470 MASK_POWERPC | MASK_NEW_MNEMONICS,
471 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
472 {"powerpc64", PROCESSOR_POWERPC64,
473 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
474 POWER_MASKS | POWERPC_OPT_MASKS},
475 {"rios", PROCESSOR_RIOS1,
476 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
477 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
478 {"rios1", PROCESSOR_RIOS1,
479 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
480 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
481 {"rsc", PROCESSOR_PPC601,
482 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
483 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
484 {"rsc1", PROCESSOR_PPC601,
485 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
486 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
487 {"rios2", PROCESSOR_RIOS2,
488 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
489 POWERPC_MASKS | MASK_NEW_MNEMONICS},
490 {"rs64a", PROCESSOR_RS64A,
491 MASK_POWERPC | MASK_NEW_MNEMONICS,
492 POWER_MASKS | POWERPC_OPT_MASKS},
493 {"401", PROCESSOR_PPC403,
494 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
495 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
496 {"403", PROCESSOR_PPC403,
497 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
498 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
499 {"405", PROCESSOR_PPC405,
500 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
501 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
502 {"405f", PROCESSOR_PPC405,
503 MASK_POWERPC | MASK_NEW_MNEMONICS,
504 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
505 {"505", PROCESSOR_MPCCORE,
506 MASK_POWERPC | MASK_NEW_MNEMONICS,
507 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
508 {"601", PROCESSOR_PPC601,
509 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
510 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
511 {"602", PROCESSOR_PPC603,
512 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
513 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
514 {"603", PROCESSOR_PPC603,
515 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
516 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
517 {"603e", PROCESSOR_PPC603,
518 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
519 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
520 {"ec603e", PROCESSOR_PPC603,
521 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
522 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
523 {"604", PROCESSOR_PPC604,
524 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
525 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
526 {"604e", PROCESSOR_PPC604e,
527 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
528 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
529 {"620", PROCESSOR_PPC620,
530 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
531 POWER_MASKS | MASK_PPC_GPOPT},
532 {"630", PROCESSOR_PPC630,
533 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
534 POWER_MASKS | MASK_PPC_GPOPT},
535 {"740", PROCESSOR_PPC750,
536 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
537 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
538 {"750", PROCESSOR_PPC750,
539 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
540 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
541 {"7400", PROCESSOR_PPC7400,
542 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
543 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
544 {"7450", PROCESSOR_PPC7450,
545 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
546 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
547 {"8540", PROCESSOR_PPC8540,
548 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
549 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
550 {"801", PROCESSOR_MPCCORE,
551 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
552 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
553 {"821", PROCESSOR_MPCCORE,
554 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
555 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
556 {"823", PROCESSOR_MPCCORE,
557 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
558 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
559 {"860", PROCESSOR_MPCCORE,
560 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
561 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
563 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
565 /* Save current -mmultiple/-mno-multiple status. */
566 int multiple = TARGET_MULTIPLE;
567 /* Save current -mstring/-mno-string status. */
568 int string = TARGET_STRING;
570 /* Identify the processor type. */
571 rs6000_select[0].string = default_cpu;
572 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
574 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
576 ptr = &rs6000_select[i];
577 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
579 for (j = 0; j < ptt_size; j++)
580 if (! strcmp (ptr->string, processor_target_table[j].name))
582 if (ptr->set_tune_p)
583 rs6000_cpu = processor_target_table[j].processor;
585 if (ptr->set_arch_p)
587 target_flags |= processor_target_table[j].target_enable;
588 target_flags &= ~processor_target_table[j].target_disable;
590 break;
593 if (j == ptt_size)
594 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
598 if (rs6000_cpu == PROCESSOR_PPC8540)
599 rs6000_isel = 1;
601 /* If we are optimizing big endian systems for space, use the load/store
602 multiple and string instructions. */
603 if (BYTES_BIG_ENDIAN && optimize_size)
604 target_flags |= MASK_MULTIPLE | MASK_STRING;
606 /* If -mmultiple or -mno-multiple was explicitly used, don't
607 override with the processor default */
608 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
609 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
611 /* If -mstring or -mno-string was explicitly used, don't override
612 with the processor default. */
613 if ((target_flags_explicit & MASK_STRING) != 0)
614 target_flags = (target_flags & ~MASK_STRING) | string;
616 /* Don't allow -mmultiple or -mstring on little endian systems
617 unless the cpu is a 750, because the hardware doesn't support the
618 instructions used in little endian mode, and causes an alignment
619 trap. The 750 does not cause an alignment trap (except when the
620 target is unaligned). */
622 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
624 if (TARGET_MULTIPLE)
626 target_flags &= ~MASK_MULTIPLE;
627 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
628 warning ("-mmultiple is not supported on little endian systems");
631 if (TARGET_STRING)
633 target_flags &= ~MASK_STRING;
634 if ((target_flags_explicit & MASK_STRING) != 0)
635 warning ("-mstring is not supported on little endian systems");
639 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
641 rs6000_flag_pic = flag_pic;
642 flag_pic = 0;
645 /* For Darwin, always silently make -fpic and -fPIC identical. */
646 if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
647 flag_pic = 2;
649 /* Set debug flags */
650 if (rs6000_debug_name)
652 if (! strcmp (rs6000_debug_name, "all"))
653 rs6000_debug_stack = rs6000_debug_arg = 1;
654 else if (! strcmp (rs6000_debug_name, "stack"))
655 rs6000_debug_stack = 1;
656 else if (! strcmp (rs6000_debug_name, "arg"))
657 rs6000_debug_arg = 1;
658 else
659 error ("unknown -mdebug-%s switch", rs6000_debug_name);
662 if (rs6000_traceback_name)
664 if (! strncmp (rs6000_traceback_name, "full", 4))
665 rs6000_traceback = traceback_full;
666 else if (! strncmp (rs6000_traceback_name, "part", 4))
667 rs6000_traceback = traceback_part;
668 else if (! strncmp (rs6000_traceback_name, "no", 2))
669 rs6000_traceback = traceback_none;
670 else
671 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
672 rs6000_traceback_name);
675 /* Set size of long double */
676 rs6000_long_double_type_size = 64;
677 if (rs6000_long_double_size_string)
679 char *tail;
680 int size = strtol (rs6000_long_double_size_string, &tail, 10);
681 if (*tail != '\0' || (size != 64 && size != 128))
682 error ("Unknown switch -mlong-double-%s",
683 rs6000_long_double_size_string);
684 else
685 rs6000_long_double_type_size = size;
688 /* Handle -mabi= options. */
689 rs6000_parse_abi_options ();
691 /* Handle -mvrsave= option. */
692 rs6000_parse_vrsave_option ();
694 /* Handle -misel= option. */
695 rs6000_parse_isel_option ();
697 #ifdef SUBTARGET_OVERRIDE_OPTIONS
698 SUBTARGET_OVERRIDE_OPTIONS;
699 #endif
700 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
701 SUBSUBTARGET_OVERRIDE_OPTIONS;
702 #endif
704 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
705 using TARGET_OPTIONS to handle a toggle switch, but we're out of
706 bits in target_flags so TARGET_SWITCHES cannot be used.
707 Assumption here is that rs6000_longcall_switch points into the
708 text of the complete option, rather than being a copy, so we can
709 scan back for the presence or absence of the no- modifier. */
710 if (rs6000_longcall_switch)
712 const char *base = rs6000_longcall_switch;
713 while (base[-1] != 'm') base--;
715 if (*rs6000_longcall_switch != '\0')
716 error ("invalid option `%s'", base);
717 rs6000_default_long_calls = (base[0] != 'n');
720 #ifdef TARGET_REGNAMES
721 /* If the user desires alternate register names, copy in the
722 alternate names now. */
723 if (TARGET_REGNAMES)
724 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
725 #endif
727 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
728 If -maix-struct-return or -msvr4-struct-return was explicitly
729 used, don't override with the ABI default. */
730 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
732 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
733 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
734 else
735 target_flags |= MASK_AIX_STRUCT_RET;
738 if (TARGET_LONG_DOUBLE_128
739 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
740 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
742 /* Allocate an alias set for register saves & restores from stack. */
743 rs6000_sr_alias_set = new_alias_set ();
745 if (TARGET_TOC)
746 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
748 /* We can only guarantee the availability of DI pseudo-ops when
749 assembling for 64-bit targets. */
750 if (!TARGET_64BIT)
752 targetm.asm_out.aligned_op.di = NULL;
753 targetm.asm_out.unaligned_op.di = NULL;
756 /* Set maximum branch target alignment at two instructions, eight bytes. */
757 align_jumps_max_skip = 8;
758 align_loops_max_skip = 8;
760 /* Arrange to save and restore machine status around nested functions. */
761 init_machine_status = rs6000_init_machine_status;
764 /* Handle -misel= option. */
765 static void
766 rs6000_parse_isel_option ()
768 if (rs6000_isel_string == 0)
769 return;
770 else if (! strcmp (rs6000_isel_string, "yes"))
771 rs6000_isel = 1;
772 else if (! strcmp (rs6000_isel_string, "no"))
773 rs6000_isel = 0;
774 else
775 error ("unknown -misel= option specified: '%s'",
776 rs6000_isel_string);
779 /* Handle -mvrsave= options. */
780 static void
781 rs6000_parse_vrsave_option ()
783 /* Generate VRSAVE instructions by default. */
784 if (rs6000_altivec_vrsave_string == 0
785 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
786 rs6000_altivec_vrsave = 1;
787 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
788 rs6000_altivec_vrsave = 0;
789 else
790 error ("unknown -mvrsave= option specified: '%s'",
791 rs6000_altivec_vrsave_string);
794 /* Handle -mabi= options. */
795 static void
796 rs6000_parse_abi_options ()
798 if (rs6000_abi_string == 0)
799 return;
800 else if (! strcmp (rs6000_abi_string, "altivec"))
801 rs6000_altivec_abi = 1;
802 else if (! strcmp (rs6000_abi_string, "no-altivec"))
803 rs6000_altivec_abi = 0;
804 else if (! strcmp (rs6000_abi_string, "spe"))
806 rs6000_spe_abi = 1;
807 if (!TARGET_SPE_ABI)
808 error ("not configured for ABI: '%s'", rs6000_abi_string);
811 else if (! strcmp (rs6000_abi_string, "no-spe"))
812 rs6000_spe_abi = 0;
813 else
814 error ("unknown ABI specified: '%s'", rs6000_abi_string);
817 void
818 optimization_options (level, size)
819 int level ATTRIBUTE_UNUSED;
820 int size ATTRIBUTE_UNUSED;
824 /* Do anything needed at the start of the asm file. */
826 void
827 rs6000_file_start (file, default_cpu)
828 FILE *file;
829 const char *default_cpu;
831 size_t i;
832 char buffer[80];
833 const char *start = buffer;
834 struct rs6000_cpu_select *ptr;
836 if (flag_verbose_asm)
838 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
839 rs6000_select[0].string = default_cpu;
841 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
843 ptr = &rs6000_select[i];
844 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
846 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
847 start = "";
851 #ifdef USING_ELFOS_H
852 switch (rs6000_sdata)
854 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
855 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
856 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
857 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
860 if (rs6000_sdata && g_switch_value)
862 fprintf (file, "%s -G %d", start, g_switch_value);
863 start = "";
865 #endif
867 if (*start == '\0')
868 putc ('\n', file);
872 /* Return nonzero if this function is known to have a null epilogue. */
875 direct_return ()
877 if (reload_completed)
879 rs6000_stack_t *info = rs6000_stack_info ();
881 if (info->first_gp_reg_save == 32
882 && info->first_fp_reg_save == 64
883 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
884 && ! info->lr_save_p
885 && ! info->cr_save_p
886 && info->vrsave_mask == 0
887 && ! info->push_p)
888 return 1;
891 return 0;
894 /* Returns 1 always. */
897 any_operand (op, mode)
898 rtx op ATTRIBUTE_UNUSED;
899 enum machine_mode mode ATTRIBUTE_UNUSED;
901 return 1;
904 /* Returns 1 if op is the count register. */
906 count_register_operand (op, mode)
907 rtx op;
908 enum machine_mode mode ATTRIBUTE_UNUSED;
910 if (GET_CODE (op) != REG)
911 return 0;
913 if (REGNO (op) == COUNT_REGISTER_REGNUM)
914 return 1;
916 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
917 return 1;
919 return 0;
922 /* Returns 1 if op is an altivec register. */
924 altivec_register_operand (op, mode)
925 rtx op;
926 enum machine_mode mode ATTRIBUTE_UNUSED;
929 return (register_operand (op, mode)
930 && (GET_CODE (op) != REG
931 || REGNO (op) > FIRST_PSEUDO_REGISTER
932 || ALTIVEC_REGNO_P (REGNO (op))));
936 xer_operand (op, mode)
937 rtx op;
938 enum machine_mode mode ATTRIBUTE_UNUSED;
940 if (GET_CODE (op) != REG)
941 return 0;
943 if (XER_REGNO_P (REGNO (op)))
944 return 1;
946 return 0;
949 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
950 by such constants completes more quickly. */
953 s8bit_cint_operand (op, mode)
954 rtx op;
955 enum machine_mode mode ATTRIBUTE_UNUSED;
957 return ( GET_CODE (op) == CONST_INT
958 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
961 /* Return 1 if OP is a constant that can fit in a D field. */
964 short_cint_operand (op, mode)
965 rtx op;
966 enum machine_mode mode ATTRIBUTE_UNUSED;
968 return (GET_CODE (op) == CONST_INT
969 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
972 /* Similar for an unsigned D field. */
975 u_short_cint_operand (op, mode)
976 rtx op;
977 enum machine_mode mode ATTRIBUTE_UNUSED;
979 return (GET_CODE (op) == CONST_INT
980 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
983 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
986 non_short_cint_operand (op, mode)
987 rtx op;
988 enum machine_mode mode ATTRIBUTE_UNUSED;
990 return (GET_CODE (op) == CONST_INT
991 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
994 /* Returns 1 if OP is a CONST_INT that is a positive value
995 and an exact power of 2. */
998 exact_log2_cint_operand (op, mode)
999 rtx op;
1000 enum machine_mode mode ATTRIBUTE_UNUSED;
1002 return (GET_CODE (op) == CONST_INT
1003 && INTVAL (op) > 0
1004 && exact_log2 (INTVAL (op)) >= 0);
1007 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1008 ctr, or lr). */
1011 gpc_reg_operand (op, mode)
1012 rtx op;
1013 enum machine_mode mode;
1015 return (register_operand (op, mode)
1016 && (GET_CODE (op) != REG
1017 || (REGNO (op) >= ARG_POINTER_REGNUM
1018 && !XER_REGNO_P (REGNO (op)))
1019 || REGNO (op) < MQ_REGNO));
1022 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1023 CR field. */
1026 cc_reg_operand (op, mode)
1027 rtx op;
1028 enum machine_mode mode;
1030 return (register_operand (op, mode)
1031 && (GET_CODE (op) != REG
1032 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1033 || CR_REGNO_P (REGNO (op))));
1036 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1037 CR field that isn't CR0. */
1040 cc_reg_not_cr0_operand (op, mode)
1041 rtx op;
1042 enum machine_mode mode;
1044 return (register_operand (op, mode)
1045 && (GET_CODE (op) != REG
1046 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1047 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1050 /* Returns 1 if OP is either a constant integer valid for a D-field or
1051 a non-special register. If a register, it must be in the proper
1052 mode unless MODE is VOIDmode. */
1055 reg_or_short_operand (op, mode)
1056 rtx op;
1057 enum machine_mode mode;
1059 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1062 /* Similar, except check if the negation of the constant would be
1063 valid for a D-field. */
1066 reg_or_neg_short_operand (op, mode)
1067 rtx op;
1068 enum machine_mode mode;
1070 if (GET_CODE (op) == CONST_INT)
1071 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1073 return gpc_reg_operand (op, mode);
1076 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1077 a non-special register. If a register, it must be in the proper
1078 mode unless MODE is VOIDmode. */
1081 reg_or_aligned_short_operand (op, mode)
1082 rtx op;
1083 enum machine_mode mode;
1085 if (gpc_reg_operand (op, mode))
1086 return 1;
1087 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1088 return 1;
1090 return 0;
1094 /* Return 1 if the operand is either a register or an integer whose
1095 high-order 16 bits are zero. */
1098 reg_or_u_short_operand (op, mode)
1099 rtx op;
1100 enum machine_mode mode;
1102 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1105 /* Return 1 is the operand is either a non-special register or ANY
1106 constant integer. */
1109 reg_or_cint_operand (op, mode)
1110 rtx op;
1111 enum machine_mode mode;
1113 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1116 /* Return 1 is the operand is either a non-special register or ANY
1117 32-bit signed constant integer. */
1120 reg_or_arith_cint_operand (op, mode)
1121 rtx op;
1122 enum machine_mode mode;
1124 return (gpc_reg_operand (op, mode)
1125 || (GET_CODE (op) == CONST_INT
1126 #if HOST_BITS_PER_WIDE_INT != 32
1127 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1128 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1129 #endif
1133 /* Return 1 is the operand is either a non-special register or a 32-bit
1134 signed constant integer valid for 64-bit addition. */
1137 reg_or_add_cint64_operand (op, mode)
1138 rtx op;
1139 enum machine_mode mode;
1141 return (gpc_reg_operand (op, mode)
1142 || (GET_CODE (op) == CONST_INT
1143 #if HOST_BITS_PER_WIDE_INT == 32
1144 && INTVAL (op) < 0x7fff8000
1145 #else
1146 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1147 < 0x100000000ll)
1148 #endif
1152 /* Return 1 is the operand is either a non-special register or a 32-bit
1153 signed constant integer valid for 64-bit subtraction. */
1156 reg_or_sub_cint64_operand (op, mode)
1157 rtx op;
1158 enum machine_mode mode;
1160 return (gpc_reg_operand (op, mode)
1161 || (GET_CODE (op) == CONST_INT
1162 #if HOST_BITS_PER_WIDE_INT == 32
1163 && (- INTVAL (op)) < 0x7fff8000
1164 #else
1165 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1166 < 0x100000000ll)
1167 #endif
1171 /* Return 1 is the operand is either a non-special register or ANY
1172 32-bit unsigned constant integer. */
1175 reg_or_logical_cint_operand (op, mode)
1176 rtx op;
1177 enum machine_mode mode;
1179 if (GET_CODE (op) == CONST_INT)
1181 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1183 if (GET_MODE_BITSIZE (mode) <= 32)
1184 abort ();
1186 if (INTVAL (op) < 0)
1187 return 0;
1190 return ((INTVAL (op) & GET_MODE_MASK (mode)
1191 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1193 else if (GET_CODE (op) == CONST_DOUBLE)
1195 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1196 || mode != DImode)
1197 abort ();
1199 return CONST_DOUBLE_HIGH (op) == 0;
1201 else
1202 return gpc_reg_operand (op, mode);
1205 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1208 got_operand (op, mode)
1209 rtx op;
1210 enum machine_mode mode ATTRIBUTE_UNUSED;
1212 return (GET_CODE (op) == SYMBOL_REF
1213 || GET_CODE (op) == CONST
1214 || GET_CODE (op) == LABEL_REF);
1217 /* Return 1 if the operand is a simple references that can be loaded via
1218 the GOT (labels involving addition aren't allowed). */
1221 got_no_const_operand (op, mode)
1222 rtx op;
1223 enum machine_mode mode ATTRIBUTE_UNUSED;
1225 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1228 /* Return the number of instructions it takes to form a constant in an
1229 integer register. */
1231 static int
1232 num_insns_constant_wide (value)
1233 HOST_WIDE_INT value;
1235 /* signed constant loadable with {cal|addi} */
1236 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1237 return 1;
1239 /* constant loadable with {cau|addis} */
1240 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1241 return 1;
1243 #if HOST_BITS_PER_WIDE_INT == 64
1244 else if (TARGET_POWERPC64)
1246 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1247 HOST_WIDE_INT high = value >> 31;
1249 if (high == 0 || high == -1)
1250 return 2;
1252 high >>= 1;
1254 if (low == 0)
1255 return num_insns_constant_wide (high) + 1;
1256 else
1257 return (num_insns_constant_wide (high)
1258 + num_insns_constant_wide (low) + 1);
1260 #endif
1262 else
1263 return 2;
1267 num_insns_constant (op, mode)
1268 rtx op;
1269 enum machine_mode mode;
1271 if (GET_CODE (op) == CONST_INT)
1273 #if HOST_BITS_PER_WIDE_INT == 64
1274 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1275 && mask64_operand (op, mode))
1276 return 2;
1277 else
1278 #endif
1279 return num_insns_constant_wide (INTVAL (op));
1282 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1284 long l;
1285 REAL_VALUE_TYPE rv;
1287 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1288 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1289 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1292 else if (GET_CODE (op) == CONST_DOUBLE)
1294 HOST_WIDE_INT low;
1295 HOST_WIDE_INT high;
1296 long l[2];
1297 REAL_VALUE_TYPE rv;
1298 int endian = (WORDS_BIG_ENDIAN == 0);
1300 if (mode == VOIDmode || mode == DImode)
1302 high = CONST_DOUBLE_HIGH (op);
1303 low = CONST_DOUBLE_LOW (op);
1305 else
1307 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1308 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1309 high = l[endian];
1310 low = l[1 - endian];
1313 if (TARGET_32BIT)
1314 return (num_insns_constant_wide (low)
1315 + num_insns_constant_wide (high));
1317 else
1319 if (high == 0 && low >= 0)
1320 return num_insns_constant_wide (low);
1322 else if (high == -1 && low < 0)
1323 return num_insns_constant_wide (low);
1325 else if (mask64_operand (op, mode))
1326 return 2;
1328 else if (low == 0)
1329 return num_insns_constant_wide (high) + 1;
1331 else
1332 return (num_insns_constant_wide (high)
1333 + num_insns_constant_wide (low) + 1);
1337 else
1338 abort ();
1341 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1342 register with one instruction per word. We only do this if we can
1343 safely read CONST_DOUBLE_{LOW,HIGH}. */
1346 easy_fp_constant (op, mode)
1347 rtx op;
1348 enum machine_mode mode;
1350 if (GET_CODE (op) != CONST_DOUBLE
1351 || GET_MODE (op) != mode
1352 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1353 return 0;
1355 /* Consider all constants with -msoft-float to be easy. */
1356 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1357 && mode != DImode)
1358 return 1;
1360 /* If we are using V.4 style PIC, consider all constants to be hard. */
1361 if (flag_pic && DEFAULT_ABI == ABI_V4)
1362 return 0;
1364 #ifdef TARGET_RELOCATABLE
1365 /* Similarly if we are using -mrelocatable, consider all constants
1366 to be hard. */
1367 if (TARGET_RELOCATABLE)
1368 return 0;
1369 #endif
1371 if (mode == TFmode)
1373 long k[4];
1374 REAL_VALUE_TYPE rv;
1376 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1377 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1379 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1380 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1381 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1382 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1385 else if (mode == DFmode)
1387 long k[2];
1388 REAL_VALUE_TYPE rv;
1390 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1391 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1393 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1394 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1397 else if (mode == SFmode)
1399 long l;
1400 REAL_VALUE_TYPE rv;
1402 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1403 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1405 return num_insns_constant_wide (l) == 1;
1408 else if (mode == DImode)
1409 return ((TARGET_POWERPC64
1410 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1411 || (num_insns_constant (op, DImode) <= 2));
1413 else if (mode == SImode)
1414 return 1;
1415 else
1416 abort ();
1419 /* Return 1 if the operand is a CONST_INT and can be put into a
1420 register with one instruction. */
1422 static int
1423 easy_vector_constant (op)
1424 rtx op;
1426 rtx elt;
1427 int units, i;
1429 if (GET_CODE (op) != CONST_VECTOR)
1430 return 0;
1432 units = CONST_VECTOR_NUNITS (op);
1434 /* We can generate 0 easily. Look for that. */
1435 for (i = 0; i < units; ++i)
1437 elt = CONST_VECTOR_ELT (op, i);
1439 /* We could probably simplify this by just checking for equality
1440 with CONST0_RTX for the current mode, but let's be safe
1441 instead. */
1443 switch (GET_CODE (elt))
1445 case CONST_INT:
1446 if (INTVAL (elt) != 0)
1447 return 0;
1448 break;
1449 case CONST_DOUBLE:
1450 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1451 return 0;
1452 break;
1453 default:
1454 return 0;
1458 /* We could probably generate a few other constants trivially, but
1459 gcc doesn't generate them yet. FIXME later. */
1460 return 1;
1463 /* Return 1 if the operand is the constant 0. This works for scalars
1464 as well as vectors. */
1466 zero_constant (op, mode)
1467 rtx op;
1468 enum machine_mode mode;
1470 return op == CONST0_RTX (mode);
1473 /* Return 1 if the operand is 0.0. */
1475 zero_fp_constant (op, mode)
1476 rtx op;
1477 enum machine_mode mode;
1479 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1482 /* Return 1 if the operand is in volatile memory. Note that during
1483 the RTL generation phase, memory_operand does not return TRUE for
1484 volatile memory references. So this function allows us to
1485 recognize volatile references where its safe. */
1488 volatile_mem_operand (op, mode)
1489 rtx op;
1490 enum machine_mode mode;
1492 if (GET_CODE (op) != MEM)
1493 return 0;
1495 if (!MEM_VOLATILE_P (op))
1496 return 0;
1498 if (mode != GET_MODE (op))
1499 return 0;
1501 if (reload_completed)
1502 return memory_operand (op, mode);
1504 if (reload_in_progress)
1505 return strict_memory_address_p (mode, XEXP (op, 0));
1507 return memory_address_p (mode, XEXP (op, 0));
1510 /* Return 1 if the operand is an offsettable memory operand. */
1513 offsettable_mem_operand (op, mode)
1514 rtx op;
1515 enum machine_mode mode;
1517 return ((GET_CODE (op) == MEM)
1518 && offsettable_address_p (reload_completed || reload_in_progress,
1519 mode, XEXP (op, 0)));
1522 /* Return 1 if the operand is either an easy FP constant (see above) or
1523 memory. */
1526 mem_or_easy_const_operand (op, mode)
1527 rtx op;
1528 enum machine_mode mode;
1530 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1533 /* Return 1 if the operand is either a non-special register or an item
1534 that can be used as the operand of a `mode' add insn. */
1537 add_operand (op, mode)
1538 rtx op;
1539 enum machine_mode mode;
1541 if (GET_CODE (op) == CONST_INT)
1542 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1543 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1545 return gpc_reg_operand (op, mode);
1548 /* Return 1 if OP is a constant but not a valid add_operand. */
1551 non_add_cint_operand (op, mode)
1552 rtx op;
1553 enum machine_mode mode ATTRIBUTE_UNUSED;
1555 return (GET_CODE (op) == CONST_INT
1556 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1557 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1560 /* Return 1 if the operand is a non-special register or a constant that
1561 can be used as the operand of an OR or XOR insn on the RS/6000. */
1564 logical_operand (op, mode)
1565 rtx op;
1566 enum machine_mode mode;
1568 HOST_WIDE_INT opl, oph;
1570 if (gpc_reg_operand (op, mode))
1571 return 1;
1573 if (GET_CODE (op) == CONST_INT)
1575 opl = INTVAL (op) & GET_MODE_MASK (mode);
1577 #if HOST_BITS_PER_WIDE_INT <= 32
1578 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1579 return 0;
1580 #endif
1582 else if (GET_CODE (op) == CONST_DOUBLE)
1584 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1585 abort ();
1587 opl = CONST_DOUBLE_LOW (op);
1588 oph = CONST_DOUBLE_HIGH (op);
1589 if (oph != 0)
1590 return 0;
1592 else
1593 return 0;
1595 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1596 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1599 /* Return 1 if C is a constant that is not a logical operand (as
1600 above), but could be split into one. */
1603 non_logical_cint_operand (op, mode)
1604 rtx op;
1605 enum machine_mode mode;
1607 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1608 && ! logical_operand (op, mode)
1609 && reg_or_logical_cint_operand (op, mode));
1612 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1613 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1614 Reject all ones and all zeros, since these should have been optimized
1615 away and confuse the making of MB and ME. */
1618 mask_operand (op, mode)
1619 rtx op;
1620 enum machine_mode mode ATTRIBUTE_UNUSED;
1622 HOST_WIDE_INT c, lsb;
1624 if (GET_CODE (op) != CONST_INT)
1625 return 0;
1627 c = INTVAL (op);
1629 /* Fail in 64-bit mode if the mask wraps around because the upper
1630 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1631 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1632 return 0;
1634 /* We don't change the number of transitions by inverting,
1635 so make sure we start with the LS bit zero. */
1636 if (c & 1)
1637 c = ~c;
1639 /* Reject all zeros or all ones. */
1640 if (c == 0)
1641 return 0;
1643 /* Find the first transition. */
1644 lsb = c & -c;
1646 /* Invert to look for a second transition. */
1647 c = ~c;
1649 /* Erase first transition. */
1650 c &= -lsb;
1652 /* Find the second transition (if any). */
1653 lsb = c & -c;
1655 /* Match if all the bits above are 1's (or c is zero). */
1656 return c == -lsb;
1659 /* Return 1 for the PowerPC64 rlwinm corner case. */
1662 mask_operand_wrap (op, mode)
1663 rtx op;
1664 enum machine_mode mode ATTRIBUTE_UNUSED;
1666 HOST_WIDE_INT c, lsb;
1668 if (GET_CODE (op) != CONST_INT)
1669 return 0;
1671 c = INTVAL (op);
1673 if ((c & 0x80000001) != 0x80000001)
1674 return 0;
1676 c = ~c;
1677 if (c == 0)
1678 return 0;
1680 lsb = c & -c;
1681 c = ~c;
1682 c &= -lsb;
1683 lsb = c & -c;
1684 return c == -lsb;
1687 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1688 It is if there are no more than one 1->0 or 0->1 transitions.
1689 Reject all zeros, since zero should have been optimized away and
1690 confuses the making of MB and ME. */
1693 mask64_operand (op, mode)
1694 rtx op;
1695 enum machine_mode mode ATTRIBUTE_UNUSED;
1697 if (GET_CODE (op) == CONST_INT)
1699 HOST_WIDE_INT c, lsb;
1701 c = INTVAL (op);
1703 /* Reject all zeros. */
1704 if (c == 0)
1705 return 0;
1707 /* We don't change the number of transitions by inverting,
1708 so make sure we start with the LS bit zero. */
1709 if (c & 1)
1710 c = ~c;
1712 /* Find the transition, and check that all bits above are 1's. */
1713 lsb = c & -c;
1715 /* Match if all the bits above are 1's (or c is zero). */
1716 return c == -lsb;
1718 return 0;
1721 /* Like mask64_operand, but allow up to three transitions. This
1722 predicate is used by insn patterns that generate two rldicl or
1723 rldicr machine insns. */
1726 mask64_2_operand (op, mode)
1727 rtx op;
1728 enum machine_mode mode ATTRIBUTE_UNUSED;
1730 if (GET_CODE (op) == CONST_INT)
1732 HOST_WIDE_INT c, lsb;
1734 c = INTVAL (op);
1736 /* Disallow all zeros. */
1737 if (c == 0)
1738 return 0;
1740 /* We don't change the number of transitions by inverting,
1741 so make sure we start with the LS bit zero. */
1742 if (c & 1)
1743 c = ~c;
1745 /* Find the first transition. */
1746 lsb = c & -c;
1748 /* Invert to look for a second transition. */
1749 c = ~c;
1751 /* Erase first transition. */
1752 c &= -lsb;
1754 /* Find the second transition. */
1755 lsb = c & -c;
1757 /* Invert to look for a third transition. */
1758 c = ~c;
1760 /* Erase second transition. */
1761 c &= -lsb;
1763 /* Find the third transition (if any). */
1764 lsb = c & -c;
1766 /* Match if all the bits above are 1's (or c is zero). */
1767 return c == -lsb;
1769 return 0;
1772 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1773 implement ANDing by the mask IN. */
1774 void
1775 build_mask64_2_operands (in, out)
1776 rtx in;
1777 rtx *out;
1779 #if HOST_BITS_PER_WIDE_INT >= 64
1780 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1781 int shift;
1783 if (GET_CODE (in) != CONST_INT)
1784 abort ();
1786 c = INTVAL (in);
1787 if (c & 1)
1789 /* Assume c initially something like 0x00fff000000fffff. The idea
1790 is to rotate the word so that the middle ^^^^^^ group of zeros
1791 is at the MS end and can be cleared with an rldicl mask. We then
1792 rotate back and clear off the MS ^^ group of zeros with a
1793 second rldicl. */
1794 c = ~c; /* c == 0xff000ffffff00000 */
1795 lsb = c & -c; /* lsb == 0x0000000000100000 */
1796 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1797 c = ~c; /* c == 0x00fff000000fffff */
1798 c &= -lsb; /* c == 0x00fff00000000000 */
1799 lsb = c & -c; /* lsb == 0x0000100000000000 */
1800 c = ~c; /* c == 0xff000fffffffffff */
1801 c &= -lsb; /* c == 0xff00000000000000 */
1802 shift = 0;
1803 while ((lsb >>= 1) != 0)
1804 shift++; /* shift == 44 on exit from loop */
1805 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1806 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1807 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1809 else
1811 /* Assume c initially something like 0xff000f0000000000. The idea
1812 is to rotate the word so that the ^^^ middle group of zeros
1813 is at the LS end and can be cleared with an rldicr mask. We then
1814 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1815 a second rldicr. */
1816 lsb = c & -c; /* lsb == 0x0000010000000000 */
1817 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1818 c = ~c; /* c == 0x00fff0ffffffffff */
1819 c &= -lsb; /* c == 0x00fff00000000000 */
1820 lsb = c & -c; /* lsb == 0x0000100000000000 */
1821 c = ~c; /* c == 0xff000fffffffffff */
1822 c &= -lsb; /* c == 0xff00000000000000 */
1823 shift = 0;
1824 while ((lsb >>= 1) != 0)
1825 shift++; /* shift == 44 on exit from loop */
1826 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1827 m1 >>= shift; /* m1 == 0x0000000000000fff */
1828 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1831 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1832 masks will be all 1's. We are guaranteed more than one transition. */
1833 out[0] = GEN_INT (64 - shift);
1834 out[1] = GEN_INT (m1);
1835 out[2] = GEN_INT (shift);
1836 out[3] = GEN_INT (m2);
1837 #else
1838 (void)in;
1839 (void)out;
1840 abort ();
1841 #endif
1844 /* Return 1 if the operand is either a non-special register or a constant
1845 that can be used as the operand of a PowerPC64 logical AND insn. */
1848 and64_operand (op, mode)
1849 rtx op;
1850 enum machine_mode mode;
1852 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1853 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1855 return (logical_operand (op, mode) || mask64_operand (op, mode));
1858 /* Like the above, but also match constants that can be implemented
1859 with two rldicl or rldicr insns. */
1862 and64_2_operand (op, mode)
1863 rtx op;
1864 enum machine_mode mode;
1866 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1867 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1869 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1872 /* Return 1 if the operand is either a non-special register or a
1873 constant that can be used as the operand of an RS/6000 logical AND insn. */
1876 and_operand (op, mode)
1877 rtx op;
1878 enum machine_mode mode;
1880 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1881 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1883 return (logical_operand (op, mode) || mask_operand (op, mode));
1886 /* Return 1 if the operand is a general register or memory operand. */
1889 reg_or_mem_operand (op, mode)
1890 rtx op;
1891 enum machine_mode mode;
1893 return (gpc_reg_operand (op, mode)
1894 || memory_operand (op, mode)
1895 || volatile_mem_operand (op, mode));
1898 /* Return 1 if the operand is a general register or memory operand without
1899 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1900 instruction. */
1903 lwa_operand (op, mode)
1904 rtx op;
1905 enum machine_mode mode;
1907 rtx inner = op;
1909 if (reload_completed && GET_CODE (inner) == SUBREG)
1910 inner = SUBREG_REG (inner);
1912 return gpc_reg_operand (inner, mode)
1913 || (memory_operand (inner, mode)
1914 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1915 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1916 && (GET_CODE (XEXP (inner, 0)) != PLUS
1917 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1918 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1921 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1924 symbol_ref_operand (op, mode)
1925 rtx op;
1926 enum machine_mode mode;
1928 if (mode != VOIDmode && GET_MODE (op) != mode)
1929 return 0;
1931 return (GET_CODE (op) == SYMBOL_REF);
1934 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1935 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1938 call_operand (op, mode)
1939 rtx op;
1940 enum machine_mode mode;
1942 if (mode != VOIDmode && GET_MODE (op) != mode)
1943 return 0;
1945 return (GET_CODE (op) == SYMBOL_REF
1946 || (GET_CODE (op) == REG
1947 && (REGNO (op) == LINK_REGISTER_REGNUM
1948 || REGNO (op) == COUNT_REGISTER_REGNUM
1949 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1952 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1953 this file and the function is not weakly defined. */
1956 current_file_function_operand (op, mode)
1957 rtx op;
1958 enum machine_mode mode ATTRIBUTE_UNUSED;
1960 return (GET_CODE (op) == SYMBOL_REF
1961 && (SYMBOL_REF_FLAG (op)
1962 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1963 && ! DECL_WEAK (current_function_decl))));
1966 /* Return 1 if this operand is a valid input for a move insn. */
1969 input_operand (op, mode)
1970 rtx op;
1971 enum machine_mode mode;
1973 /* Memory is always valid. */
1974 if (memory_operand (op, mode))
1975 return 1;
1977 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1978 if (GET_CODE (op) == CONSTANT_P_RTX)
1979 return 1;
1981 /* For floating-point, easy constants are valid. */
1982 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1983 && CONSTANT_P (op)
1984 && easy_fp_constant (op, mode))
1985 return 1;
1987 /* Allow any integer constant. */
1988 if (GET_MODE_CLASS (mode) == MODE_INT
1989 && (GET_CODE (op) == CONST_INT
1990 || GET_CODE (op) == CONST_DOUBLE))
1991 return 1;
1993 /* For floating-point or multi-word mode, the only remaining valid type
1994 is a register. */
1995 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1996 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1997 return register_operand (op, mode);
1999 /* The only cases left are integral modes one word or smaller (we
2000 do not get called for MODE_CC values). These can be in any
2001 register. */
2002 if (register_operand (op, mode))
2003 return 1;
2005 /* A SYMBOL_REF referring to the TOC is valid. */
2006 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
2007 return 1;
2009 /* A constant pool expression (relative to the TOC) is valid */
2010 if (TOC_RELATIVE_EXPR_P (op))
2011 return 1;
2013 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2014 to be valid. */
2015 if (DEFAULT_ABI == ABI_V4
2016 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2017 && small_data_operand (op, Pmode))
2018 return 1;
2020 return 0;
2023 /* Return 1 for an operand in small memory on V.4/eabi. */
2026 small_data_operand (op, mode)
2027 rtx op ATTRIBUTE_UNUSED;
2028 enum machine_mode mode ATTRIBUTE_UNUSED;
2030 #if TARGET_ELF
2031 rtx sym_ref;
2033 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2034 return 0;
2036 if (DEFAULT_ABI != ABI_V4)
2037 return 0;
2039 if (GET_CODE (op) == SYMBOL_REF)
2040 sym_ref = op;
2042 else if (GET_CODE (op) != CONST
2043 || GET_CODE (XEXP (op, 0)) != PLUS
2044 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2045 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2046 return 0;
2048 else
2050 rtx sum = XEXP (op, 0);
2051 HOST_WIDE_INT summand;
2053 /* We have to be careful here, because it is the referenced address
2054 that must be 32k from _SDA_BASE_, not just the symbol. */
2055 summand = INTVAL (XEXP (sum, 1));
2056 if (summand < 0 || summand > g_switch_value)
2057 return 0;
2059 sym_ref = XEXP (sum, 0);
2062 if (*XSTR (sym_ref, 0) != '@')
2063 return 0;
2065 return 1;
2067 #else
2068 return 0;
2069 #endif
2072 static int
2073 constant_pool_expr_1 (op, have_sym, have_toc)
2074 rtx op;
2075 int *have_sym;
2076 int *have_toc;
2078 switch (GET_CODE(op))
2080 case SYMBOL_REF:
2081 if (CONSTANT_POOL_ADDRESS_P (op))
2083 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2085 *have_sym = 1;
2086 return 1;
2088 else
2089 return 0;
2091 else if (! strcmp (XSTR (op, 0), toc_label_name))
2093 *have_toc = 1;
2094 return 1;
2096 else
2097 return 0;
2098 case PLUS:
2099 case MINUS:
2100 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2101 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2102 case CONST:
2103 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2104 case CONST_INT:
2105 return 1;
2106 default:
2107 return 0;
2112 constant_pool_expr_p (op)
2113 rtx op;
2115 int have_sym = 0;
2116 int have_toc = 0;
2117 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2121 toc_relative_expr_p (op)
2122 rtx op;
2124 int have_sym = 0;
2125 int have_toc = 0;
2126 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2129 /* Try machine-dependent ways of modifying an illegitimate address
2130 to be legitimate. If we find one, return the new, valid address.
2131 This is used from only one place: `memory_address' in explow.c.
2133 OLDX is the address as it was before break_out_memory_refs was
2134 called. In some cases it is useful to look at this to decide what
2135 needs to be done.
2137 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2139 It is always safe for this function to do nothing. It exists to
2140 recognize opportunities to optimize the output.
2142 On RS/6000, first check for the sum of a register with a constant
2143 integer that is out of range. If so, generate code to add the
2144 constant with the low-order 16 bits masked to the register and force
2145 this result into another register (this can be done with `cau').
2146 Then generate an address of REG+(CONST&0xffff), allowing for the
2147 possibility of bit 16 being a one.
2149 Then check for the sum of a register and something not constant, try to
2150 load the other things into a register and return the sum. */
2152 rs6000_legitimize_address (x, oldx, mode)
2153 rtx x;
2154 rtx oldx ATTRIBUTE_UNUSED;
2155 enum machine_mode mode;
2157 if (GET_CODE (x) == PLUS
2158 && GET_CODE (XEXP (x, 0)) == REG
2159 && GET_CODE (XEXP (x, 1)) == CONST_INT
2160 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2162 HOST_WIDE_INT high_int, low_int;
2163 rtx sum;
2164 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2165 high_int = INTVAL (XEXP (x, 1)) - low_int;
2166 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2167 GEN_INT (high_int)), 0);
2168 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2170 else if (GET_CODE (x) == PLUS
2171 && GET_CODE (XEXP (x, 0)) == REG
2172 && GET_CODE (XEXP (x, 1)) != CONST_INT
2173 && GET_MODE_NUNITS (mode) == 1
2174 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2175 || TARGET_POWERPC64
2176 || (mode != DFmode && mode != TFmode))
2177 && (TARGET_POWERPC64 || mode != DImode)
2178 && mode != TImode)
2180 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2181 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2183 else if (ALTIVEC_VECTOR_MODE (mode))
2185 rtx reg;
2187 /* Make sure both operands are registers. */
2188 if (GET_CODE (x) == PLUS)
2189 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2190 force_reg (Pmode, XEXP (x, 1)));
2192 reg = force_reg (Pmode, x);
2193 return reg;
2195 else if (SPE_VECTOR_MODE (mode))
2197 /* We accept [reg + reg] and [reg + OFFSET]. */
2199 if (GET_CODE (x) == PLUS)
2201 rtx op1 = XEXP (x, 0);
2202 rtx op2 = XEXP (x, 1);
2204 op1 = force_reg (Pmode, op1);
2206 if (GET_CODE (op2) != REG
2207 && (GET_CODE (op2) != CONST_INT
2208 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2209 op2 = force_reg (Pmode, op2);
2211 return gen_rtx_PLUS (Pmode, op1, op2);
2214 return force_reg (Pmode, x);
2216 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
2217 && GET_CODE (x) != CONST_INT
2218 && GET_CODE (x) != CONST_DOUBLE
2219 && CONSTANT_P (x)
2220 && GET_MODE_NUNITS (mode) == 1
2221 && (GET_MODE_BITSIZE (mode) <= 32
2222 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2224 rtx reg = gen_reg_rtx (Pmode);
2225 emit_insn (gen_elf_high (reg, (x)));
2226 return gen_rtx_LO_SUM (Pmode, reg, (x));
2228 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2229 && ! flag_pic
2230 && GET_CODE (x) != CONST_INT
2231 && GET_CODE (x) != CONST_DOUBLE
2232 && CONSTANT_P (x)
2233 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2234 && mode != DImode
2235 && mode != TImode)
2237 rtx reg = gen_reg_rtx (Pmode);
2238 emit_insn (gen_macho_high (reg, (x)));
2239 return gen_rtx_LO_SUM (Pmode, reg, (x));
2241 else if (TARGET_TOC
2242 && CONSTANT_POOL_EXPR_P (x)
2243 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2245 return create_TOC_reference (x);
2247 else
2248 return NULL_RTX;
2251 /* The convention appears to be to define this wherever it is used.
2252 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2253 is now used here. */
2254 #ifndef REG_MODE_OK_FOR_BASE_P
2255 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2256 #endif
2258 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2259 replace the input X, or the original X if no replacement is called for.
2260 The output parameter *WIN is 1 if the calling macro should goto WIN,
2261 0 if it should not.
2263 For RS/6000, we wish to handle large displacements off a base
2264 register by splitting the addend across an addiu/addis and the mem insn.
2265 This cuts number of extra insns needed from 3 to 1.
2267 On Darwin, we use this to generate code for floating point constants.
2268 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2269 The Darwin code is inside #if TARGET_MACHO because only then is
2270 machopic_function_base_name() defined. */
2272 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2273 rtx x;
2274 enum machine_mode mode;
2275 int opnum;
2276 int type;
2277 int ind_levels ATTRIBUTE_UNUSED;
2278 int *win;
2280 /* We must recognize output that we have already generated ourselves. */
2281 if (GET_CODE (x) == PLUS
2282 && GET_CODE (XEXP (x, 0)) == PLUS
2283 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2284 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2285 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2287 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2288 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2289 opnum, (enum reload_type)type);
2290 *win = 1;
2291 return x;
2294 #if TARGET_MACHO
2295 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2296 && GET_CODE (x) == LO_SUM
2297 && GET_CODE (XEXP (x, 0)) == PLUS
2298 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2299 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2300 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2301 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2302 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2303 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2304 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2306 /* Result of previous invocation of this function on Darwin
2307 floating point constant. */
2308 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2309 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2310 opnum, (enum reload_type)type);
2311 *win = 1;
2312 return x;
2314 #endif
2315 if (GET_CODE (x) == PLUS
2316 && GET_CODE (XEXP (x, 0)) == REG
2317 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2318 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2319 && GET_CODE (XEXP (x, 1)) == CONST_INT
2320 && !SPE_VECTOR_MODE (mode)
2321 && !ALTIVEC_VECTOR_MODE (mode))
2323 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2324 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2325 HOST_WIDE_INT high
2326 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2328 /* Check for 32-bit overflow. */
2329 if (high + low != val)
2331 *win = 0;
2332 return x;
2335 /* Reload the high part into a base reg; leave the low part
2336 in the mem directly. */
2338 x = gen_rtx_PLUS (GET_MODE (x),
2339 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2340 GEN_INT (high)),
2341 GEN_INT (low));
2343 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2344 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2345 opnum, (enum reload_type)type);
2346 *win = 1;
2347 return x;
2349 #if TARGET_MACHO
2350 if (GET_CODE (x) == SYMBOL_REF
2351 && DEFAULT_ABI == ABI_DARWIN
2352 && !ALTIVEC_VECTOR_MODE (mode)
2353 && flag_pic)
2355 /* Darwin load of floating point constant. */
2356 rtx offset = gen_rtx (CONST, Pmode,
2357 gen_rtx (MINUS, Pmode, x,
2358 gen_rtx (SYMBOL_REF, Pmode,
2359 machopic_function_base_name ())));
2360 x = gen_rtx (LO_SUM, GET_MODE (x),
2361 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2362 gen_rtx (HIGH, Pmode, offset)), offset);
2363 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2364 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2365 opnum, (enum reload_type)type);
2366 *win = 1;
2367 return x;
2369 #endif
2370 if (TARGET_TOC
2371 && CONSTANT_POOL_EXPR_P (x)
2372 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2374 (x) = create_TOC_reference (x);
2375 *win = 1;
2376 return x;
2378 *win = 0;
2379 return x;
2382 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2383 that is a valid memory address for an instruction.
2384 The MODE argument is the machine mode for the MEM expression
2385 that wants to use this address.
2387 On the RS/6000, there are four valid address: a SYMBOL_REF that
2388 refers to a constant pool entry of an address (or the sum of it
2389 plus a constant), a short (16-bit signed) constant plus a register,
2390 the sum of two registers, or a register indirect, possibly with an
2391 auto-increment. For DFmode and DImode with a constant plus register,
2392 we must ensure that both words are addressable or PowerPC64 with offset
2393 word aligned.
2395 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2396 32-bit DImode, TImode), indexed addressing cannot be used because
2397 adjacent memory cells are accessed by adding word-sized offsets
2398 during assembly output. */
2400 rs6000_legitimate_address (mode, x, reg_ok_strict)
2401 enum machine_mode mode;
2402 rtx x;
2403 int reg_ok_strict;
2405 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2406 return 1;
2407 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2408 && !ALTIVEC_VECTOR_MODE (mode)
2409 && !SPE_VECTOR_MODE (mode)
2410 && TARGET_UPDATE
2411 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2412 return 1;
2413 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2414 return 1;
2415 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2416 return 1;
2417 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2418 if (! reg_ok_strict
2419 && GET_CODE (x) == PLUS
2420 && GET_CODE (XEXP (x, 0)) == REG
2421 && XEXP (x, 0) == virtual_stack_vars_rtx
2422 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2423 return 1;
2424 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2425 return 1;
2426 if (mode != TImode
2427 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2428 || TARGET_POWERPC64
2429 || (mode != DFmode && mode != TFmode))
2430 && (TARGET_POWERPC64 || mode != DImode)
2431 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2432 return 1;
2433 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2434 return 1;
2435 return 0;
2438 /* Try to output insns to set TARGET equal to the constant C if it can
2439 be done in less than N insns. Do all computations in MODE.
2440 Returns the place where the output has been placed if it can be
2441 done and the insns have been emitted. If it would take more than N
2442 insns, zero is returned and no insns and emitted. */
2445 rs6000_emit_set_const (dest, mode, source, n)
2446 rtx dest, source;
2447 enum machine_mode mode;
2448 int n ATTRIBUTE_UNUSED;
2450 rtx result, insn, set;
2451 HOST_WIDE_INT c0, c1;
2453 if (mode == QImode || mode == HImode)
2455 if (dest == NULL)
2456 dest = gen_reg_rtx (mode);
2457 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2458 return dest;
2460 else if (mode == SImode)
2462 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
2464 emit_insn (gen_rtx_SET (VOIDmode, result,
2465 GEN_INT (INTVAL (source)
2466 & (~ (HOST_WIDE_INT) 0xffff))));
2467 emit_insn (gen_rtx_SET (VOIDmode, dest,
2468 gen_rtx_IOR (SImode, result,
2469 GEN_INT (INTVAL (source) & 0xffff))));
2470 result = dest;
2472 else if (mode == DImode)
2474 if (GET_CODE (source) == CONST_INT)
2476 c0 = INTVAL (source);
2477 c1 = -(c0 < 0);
2479 else if (GET_CODE (source) == CONST_DOUBLE)
2481 #if HOST_BITS_PER_WIDE_INT >= 64
2482 c0 = CONST_DOUBLE_LOW (source);
2483 c1 = -(c0 < 0);
2484 #else
2485 c0 = CONST_DOUBLE_LOW (source);
2486 c1 = CONST_DOUBLE_HIGH (source);
2487 #endif
2489 else
2490 abort ();
2492 result = rs6000_emit_set_long_const (dest, c0, c1);
2494 else
2495 abort ();
2497 insn = get_last_insn ();
2498 set = single_set (insn);
2499 if (! CONSTANT_P (SET_SRC (set)))
2500 set_unique_reg_note (insn, REG_EQUAL, source);
2502 return result;
2505 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2506 fall back to a straight forward decomposition. We do this to avoid
2507 exponential run times encountered when looking for longer sequences
2508 with rs6000_emit_set_const. */
2509 static rtx
2510 rs6000_emit_set_long_const (dest, c1, c2)
2511 rtx dest;
2512 HOST_WIDE_INT c1, c2;
2514 if (!TARGET_POWERPC64)
2516 rtx operand1, operand2;
2518 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2519 DImode);
2520 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2521 DImode);
2522 emit_move_insn (operand1, GEN_INT (c1));
2523 emit_move_insn (operand2, GEN_INT (c2));
2525 else
2527 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2529 ud1 = c1 & 0xffff;
2530 ud2 = (c1 & 0xffff0000) >> 16;
2531 #if HOST_BITS_PER_WIDE_INT >= 64
2532 c2 = c1 >> 32;
2533 #endif
2534 ud3 = c2 & 0xffff;
2535 ud4 = (c2 & 0xffff0000) >> 16;
2537 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2538 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2540 if (ud1 & 0x8000)
2541 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2542 else
2543 emit_move_insn (dest, GEN_INT (ud1));
2546 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2547 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2549 if (ud2 & 0x8000)
2550 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2551 - 0x80000000));
2552 else
2553 emit_move_insn (dest, GEN_INT (ud2 << 16));
2554 if (ud1 != 0)
2555 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2557 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2558 || (ud4 == 0 && ! (ud3 & 0x8000)))
2560 if (ud3 & 0x8000)
2561 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2562 - 0x80000000));
2563 else
2564 emit_move_insn (dest, GEN_INT (ud3 << 16));
2566 if (ud2 != 0)
2567 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2568 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2569 if (ud1 != 0)
2570 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2572 else
2574 if (ud4 & 0x8000)
2575 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2576 - 0x80000000));
2577 else
2578 emit_move_insn (dest, GEN_INT (ud4 << 16));
2580 if (ud3 != 0)
2581 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2583 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2584 if (ud2 != 0)
2585 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2586 GEN_INT (ud2 << 16)));
2587 if (ud1 != 0)
2588 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2591 return dest;
2594 /* Emit a move from SOURCE to DEST in mode MODE. */
2595 void
2596 rs6000_emit_move (dest, source, mode)
2597 rtx dest;
2598 rtx source;
2599 enum machine_mode mode;
2601 rtx operands[2];
2602 operands[0] = dest;
2603 operands[1] = source;
2605 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2606 if (GET_CODE (operands[1]) == CONST_DOUBLE
2607 && ! FLOAT_MODE_P (mode)
2608 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2610 /* FIXME. This should never happen. */
2611 /* Since it seems that it does, do the safe thing and convert
2612 to a CONST_INT. */
2613 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2615 if (GET_CODE (operands[1]) == CONST_DOUBLE
2616 && ! FLOAT_MODE_P (mode)
2617 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2618 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2619 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2620 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2621 abort ();
2623 /* Check if GCC is setting up a block move that will end up using FP
2624 registers as temporaries. We must make sure this is acceptable. */
2625 if (GET_CODE (operands[0]) == MEM
2626 && GET_CODE (operands[1]) == MEM
2627 && mode == DImode
2628 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2629 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2630 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2631 ? 32 : MEM_ALIGN (operands[0])))
2632 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2633 ? 32
2634 : MEM_ALIGN (operands[1]))))
2635 && ! MEM_VOLATILE_P (operands [0])
2636 && ! MEM_VOLATILE_P (operands [1]))
2638 emit_move_insn (adjust_address (operands[0], SImode, 0),
2639 adjust_address (operands[1], SImode, 0));
2640 emit_move_insn (adjust_address (operands[0], SImode, 4),
2641 adjust_address (operands[1], SImode, 4));
2642 return;
2645 if (!no_new_pseudos)
2647 if (GET_CODE (operands[1]) == MEM && optimize > 0
2648 && (mode == QImode || mode == HImode || mode == SImode)
2649 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2651 rtx reg = gen_reg_rtx (word_mode);
2653 emit_insn (gen_rtx_SET (word_mode, reg,
2654 gen_rtx_ZERO_EXTEND (word_mode,
2655 operands[1])));
2656 operands[1] = gen_lowpart (mode, reg);
2658 if (GET_CODE (operands[0]) != REG)
2659 operands[1] = force_reg (mode, operands[1]);
2662 if (mode == SFmode && ! TARGET_POWERPC
2663 && TARGET_HARD_FLOAT && TARGET_FPRS
2664 && GET_CODE (operands[0]) == MEM)
2666 int regnum;
2668 if (reload_in_progress || reload_completed)
2669 regnum = true_regnum (operands[1]);
2670 else if (GET_CODE (operands[1]) == REG)
2671 regnum = REGNO (operands[1]);
2672 else
2673 regnum = -1;
2675 /* If operands[1] is a register, on POWER it may have
2676 double-precision data in it, so truncate it to single
2677 precision. */
2678 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2680 rtx newreg;
2681 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2682 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2683 operands[1] = newreg;
2687 /* Handle the case where reload calls us with an invalid address;
2688 and the case of CONSTANT_P_RTX. */
2689 if (!ALTIVEC_VECTOR_MODE (mode)
2690 && (! general_operand (operands[1], mode)
2691 || ! nonimmediate_operand (operands[0], mode)
2692 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2694 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2695 return;
2698 /* FIXME: In the long term, this switch statement should go away
2699 and be replaced by a sequence of tests based on things like
2700 mode == Pmode. */
2701 switch (mode)
2703 case HImode:
2704 case QImode:
2705 if (CONSTANT_P (operands[1])
2706 && GET_CODE (operands[1]) != CONST_INT)
2707 operands[1] = force_const_mem (mode, operands[1]);
2708 break;
2710 case TFmode:
2711 case DFmode:
2712 case SFmode:
2713 if (CONSTANT_P (operands[1])
2714 && ! easy_fp_constant (operands[1], mode))
2715 operands[1] = force_const_mem (mode, operands[1]);
2716 break;
2718 case V16QImode:
2719 case V8HImode:
2720 case V4SFmode:
2721 case V4SImode:
2722 case V4HImode:
2723 case V2SFmode:
2724 case V2SImode:
2725 case V1DImode:
2726 if (CONSTANT_P (operands[1])
2727 && !easy_vector_constant (operands[1]))
2728 operands[1] = force_const_mem (mode, operands[1]);
2729 break;
2731 case SImode:
2732 case DImode:
2733 /* Use default pattern for address of ELF small data */
2734 if (TARGET_ELF
2735 && mode == Pmode
2736 && DEFAULT_ABI == ABI_V4
2737 && (GET_CODE (operands[1]) == SYMBOL_REF
2738 || GET_CODE (operands[1]) == CONST)
2739 && small_data_operand (operands[1], mode))
2741 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2742 return;
2745 if (DEFAULT_ABI == ABI_V4
2746 && mode == Pmode && mode == SImode
2747 && flag_pic == 1 && got_operand (operands[1], mode))
2749 emit_insn (gen_movsi_got (operands[0], operands[1]));
2750 return;
2753 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2754 && TARGET_NO_TOC && ! flag_pic
2755 && mode == Pmode
2756 && CONSTANT_P (operands[1])
2757 && GET_CODE (operands[1]) != HIGH
2758 && GET_CODE (operands[1]) != CONST_INT)
2760 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2762 /* If this is a function address on -mcall-aixdesc,
2763 convert it to the address of the descriptor. */
2764 if (DEFAULT_ABI == ABI_AIX
2765 && GET_CODE (operands[1]) == SYMBOL_REF
2766 && XSTR (operands[1], 0)[0] == '.')
2768 const char *name = XSTR (operands[1], 0);
2769 rtx new_ref;
2770 while (*name == '.')
2771 name++;
2772 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2773 CONSTANT_POOL_ADDRESS_P (new_ref)
2774 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2775 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2776 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2777 operands[1] = new_ref;
2780 if (DEFAULT_ABI == ABI_DARWIN)
2782 emit_insn (gen_macho_high (target, operands[1]));
2783 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2784 return;
2787 emit_insn (gen_elf_high (target, operands[1]));
2788 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2789 return;
2792 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2793 and we have put it in the TOC, we just need to make a TOC-relative
2794 reference to it. */
2795 if (TARGET_TOC
2796 && GET_CODE (operands[1]) == SYMBOL_REF
2797 && CONSTANT_POOL_EXPR_P (operands[1])
2798 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2799 get_pool_mode (operands[1])))
2801 operands[1] = create_TOC_reference (operands[1]);
2803 else if (mode == Pmode
2804 && CONSTANT_P (operands[1])
2805 && ((GET_CODE (operands[1]) != CONST_INT
2806 && ! easy_fp_constant (operands[1], mode))
2807 || (GET_CODE (operands[1]) == CONST_INT
2808 && num_insns_constant (operands[1], mode) > 2)
2809 || (GET_CODE (operands[0]) == REG
2810 && FP_REGNO_P (REGNO (operands[0]))))
2811 && GET_CODE (operands[1]) != HIGH
2812 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2813 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2815 /* Emit a USE operation so that the constant isn't deleted if
2816 expensive optimizations are turned on because nobody
2817 references it. This should only be done for operands that
2818 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2819 This should not be done for operands that contain LABEL_REFs.
2820 For now, we just handle the obvious case. */
2821 if (GET_CODE (operands[1]) != LABEL_REF)
2822 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2824 #if TARGET_MACHO
2825 /* Darwin uses a special PIC legitimizer. */
2826 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2828 operands[1] =
2829 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2830 operands[0]);
2831 if (operands[0] != operands[1])
2832 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2833 return;
2835 #endif
2837 /* If we are to limit the number of things we put in the TOC and
2838 this is a symbol plus a constant we can add in one insn,
2839 just put the symbol in the TOC and add the constant. Don't do
2840 this if reload is in progress. */
2841 if (GET_CODE (operands[1]) == CONST
2842 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2843 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2844 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2845 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2846 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2847 && ! side_effects_p (operands[0]))
2849 rtx sym =
2850 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2851 rtx other = XEXP (XEXP (operands[1], 0), 1);
2853 sym = force_reg (mode, sym);
2854 if (mode == SImode)
2855 emit_insn (gen_addsi3 (operands[0], sym, other));
2856 else
2857 emit_insn (gen_adddi3 (operands[0], sym, other));
2858 return;
2861 operands[1] = force_const_mem (mode, operands[1]);
2863 if (TARGET_TOC
2864 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2865 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2866 get_pool_constant (XEXP (operands[1], 0)),
2867 get_pool_mode (XEXP (operands[1], 0))))
2869 operands[1]
2870 = gen_rtx_MEM (mode,
2871 create_TOC_reference (XEXP (operands[1], 0)));
2872 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2873 RTX_UNCHANGING_P (operands[1]) = 1;
2876 break;
2878 case TImode:
2879 if (GET_CODE (operands[0]) == MEM
2880 && GET_CODE (XEXP (operands[0], 0)) != REG
2881 && ! reload_in_progress)
2882 operands[0]
2883 = replace_equiv_address (operands[0],
2884 copy_addr_to_reg (XEXP (operands[0], 0)));
2886 if (GET_CODE (operands[1]) == MEM
2887 && GET_CODE (XEXP (operands[1], 0)) != REG
2888 && ! reload_in_progress)
2889 operands[1]
2890 = replace_equiv_address (operands[1],
2891 copy_addr_to_reg (XEXP (operands[1], 0)));
2892 if (TARGET_POWER)
2894 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2895 gen_rtvec (2,
2896 gen_rtx_SET (VOIDmode,
2897 operands[0], operands[1]),
2898 gen_rtx_CLOBBER (VOIDmode,
2899 gen_rtx_SCRATCH (SImode)))));
2900 return;
2902 break;
2904 default:
2905 abort ();
2908 /* Above, we may have called force_const_mem which may have returned
2909 an invalid address. If we can, fix this up; otherwise, reload will
2910 have to deal with it. */
2911 if (GET_CODE (operands[1]) == MEM
2912 && ! memory_address_p (mode, XEXP (operands[1], 0))
2913 && ! reload_in_progress)
2914 operands[1] = adjust_address (operands[1], mode, 0);
2916 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2917 return;
2920 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2921 for a call to a function whose data type is FNTYPE.
2922 For a library call, FNTYPE is 0.
2924 For incoming args we set the number of arguments in the prototype large
2925 so we never return a PARALLEL. */
2927 void
2928 init_cumulative_args (cum, fntype, libname, incoming)
2929 CUMULATIVE_ARGS *cum;
2930 tree fntype;
2931 rtx libname ATTRIBUTE_UNUSED;
2932 int incoming;
2934 static CUMULATIVE_ARGS zero_cumulative;
2936 *cum = zero_cumulative;
2937 cum->words = 0;
2938 cum->fregno = FP_ARG_MIN_REG;
2939 cum->vregno = ALTIVEC_ARG_MIN_REG;
2940 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2941 cum->call_cookie = CALL_NORMAL;
2942 cum->sysv_gregno = GP_ARG_MIN_REG;
2944 if (incoming)
2945 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2947 else if (cum->prototype)
2948 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2949 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2950 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2952 else
2953 cum->nargs_prototype = 0;
2955 cum->orig_nargs = cum->nargs_prototype;
2957 /* Check for a longcall attribute. */
2958 if (fntype
2959 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2960 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2961 cum->call_cookie = CALL_LONG;
2963 if (TARGET_DEBUG_ARG)
2965 fprintf (stderr, "\ninit_cumulative_args:");
2966 if (fntype)
2968 tree ret_type = TREE_TYPE (fntype);
2969 fprintf (stderr, " ret code = %s,",
2970 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2973 if (cum->call_cookie & CALL_LONG)
2974 fprintf (stderr, " longcall,");
2976 fprintf (stderr, " proto = %d, nargs = %d\n",
2977 cum->prototype, cum->nargs_prototype);
2981 /* If defined, a C expression which determines whether, and in which
2982 direction, to pad out an argument with extra space. The value
2983 should be of type `enum direction': either `upward' to pad above
2984 the argument, `downward' to pad below, or `none' to inhibit
2985 padding.
2987 For the AIX ABI structs are always stored left shifted in their
2988 argument slot. */
2990 enum direction
2991 function_arg_padding (mode, type)
2992 enum machine_mode mode;
2993 tree type;
2995 if (type != 0 && AGGREGATE_TYPE_P (type))
2996 return upward;
2998 /* This is the default definition. */
2999 return (! BYTES_BIG_ENDIAN
3000 ? upward
3001 : ((mode == BLKmode
3002 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3003 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3004 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3005 ? downward : upward));
3008 /* If defined, a C expression that gives the alignment boundary, in bits,
3009 of an argument with the specified mode and type. If it is not defined,
3010 PARM_BOUNDARY is used for all arguments.
3012 V.4 wants long longs to be double word aligned. */
3015 function_arg_boundary (mode, type)
3016 enum machine_mode mode;
3017 tree type ATTRIBUTE_UNUSED;
3019 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3020 return 64;
3021 else if (SPE_VECTOR_MODE (mode))
3022 return 64;
3023 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3024 return 128;
3025 else
3026 return PARM_BOUNDARY;
3029 /* Update the data in CUM to advance over an argument
3030 of mode MODE and data type TYPE.
3031 (TYPE is null for libcalls where that information may not be available.) */
3033 void
3034 function_arg_advance (cum, mode, type, named)
3035 CUMULATIVE_ARGS *cum;
3036 enum machine_mode mode;
3037 tree type;
3038 int named;
3040 cum->nargs_prototype--;
3042 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3044 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3045 cum->vregno++;
3046 else
3047 cum->words += RS6000_ARG_SIZE (mode, type);
3049 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3050 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3051 cum->sysv_gregno++;
3052 else if (DEFAULT_ABI == ABI_V4)
3054 if (TARGET_HARD_FLOAT && TARGET_FPRS
3055 && (mode == SFmode || mode == DFmode))
3057 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3058 cum->fregno++;
3059 else
3061 if (mode == DFmode)
3062 cum->words += cum->words & 1;
3063 cum->words += RS6000_ARG_SIZE (mode, type);
3066 else
3068 int n_words;
3069 int gregno = cum->sysv_gregno;
3071 /* Aggregates and IEEE quad get passed by reference. */
3072 if ((type && AGGREGATE_TYPE_P (type))
3073 || mode == TFmode)
3074 n_words = 1;
3075 else
3076 n_words = RS6000_ARG_SIZE (mode, type);
3078 /* Long long and SPE vectors are put in odd registers. */
3079 if (n_words == 2 && (gregno & 1) == 0)
3080 gregno += 1;
3082 /* Long long and SPE vectors are not split between registers
3083 and stack. */
3084 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3086 /* Long long is aligned on the stack. */
3087 if (n_words == 2)
3088 cum->words += cum->words & 1;
3089 cum->words += n_words;
3092 /* Note: continuing to accumulate gregno past when we've started
3093 spilling to the stack indicates the fact that we've started
3094 spilling to the stack to expand_builtin_saveregs. */
3095 cum->sysv_gregno = gregno + n_words;
3098 if (TARGET_DEBUG_ARG)
3100 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3101 cum->words, cum->fregno);
3102 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3103 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3104 fprintf (stderr, "mode = %4s, named = %d\n",
3105 GET_MODE_NAME (mode), named);
3108 else
3110 int align = (TARGET_32BIT && (cum->words & 1) != 0
3111 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3113 cum->words += align + RS6000_ARG_SIZE (mode, type);
3115 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3116 && TARGET_HARD_FLOAT && TARGET_FPRS)
3117 cum->fregno += (mode == TFmode ? 2 : 1);
3119 if (TARGET_DEBUG_ARG)
3121 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3122 cum->words, cum->fregno);
3123 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3124 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3125 fprintf (stderr, "named = %d, align = %d\n", named, align);
3130 /* Determine where to put an argument to a function.
3131 Value is zero to push the argument on the stack,
3132 or a hard register in which to store the argument.
3134 MODE is the argument's machine mode.
3135 TYPE is the data type of the argument (as a tree).
3136 This is null for libcalls where that information may
3137 not be available.
3138 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3139 the preceding args and about the function being called.
3140 NAMED is nonzero if this argument is a named parameter
3141 (otherwise it is an extra parameter matching an ellipsis).
3143 On RS/6000 the first eight words of non-FP are normally in registers
3144 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3145 Under V.4, the first 8 FP args are in registers.
3147 If this is floating-point and no prototype is specified, we use
3148 both an FP and integer register (or possibly FP reg and stack). Library
3149 functions (when TYPE is zero) always have the proper types for args,
3150 so we can pass the FP value just in one register. emit_library_function
3151 doesn't support PARALLEL anyway. */
3153 struct rtx_def *
3154 function_arg (cum, mode, type, named)
3155 CUMULATIVE_ARGS *cum;
3156 enum machine_mode mode;
3157 tree type;
3158 int named;
3160 enum rs6000_abi abi = DEFAULT_ABI;
3162 /* Return a marker to indicate whether CR1 needs to set or clear the
3163 bit that V.4 uses to say fp args were passed in registers.
3164 Assume that we don't need the marker for software floating point,
3165 or compiler generated library calls. */
3166 if (mode == VOIDmode)
3168 if (abi == ABI_V4
3169 && cum->nargs_prototype < 0
3170 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3172 /* For the SPE, we need to crxor CR6 always. */
3173 if (TARGET_SPE_ABI)
3174 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3175 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3176 return GEN_INT (cum->call_cookie
3177 | ((cum->fregno == FP_ARG_MIN_REG)
3178 ? CALL_V4_SET_FP_ARGS
3179 : CALL_V4_CLEAR_FP_ARGS));
3182 return GEN_INT (cum->call_cookie);
3185 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3187 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3188 return gen_rtx_REG (mode, cum->vregno);
3189 else
3190 return NULL;
3192 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3194 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3195 return gen_rtx_REG (mode, cum->sysv_gregno);
3196 else
3197 return NULL;
3199 else if (abi == ABI_V4)
3201 if (TARGET_HARD_FLOAT && TARGET_FPRS
3202 && (mode == SFmode || mode == DFmode))
3204 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3205 return gen_rtx_REG (mode, cum->fregno);
3206 else
3207 return NULL;
3209 else
3211 int n_words;
3212 int gregno = cum->sysv_gregno;
3214 /* Aggregates and IEEE quad get passed by reference. */
3215 if ((type && AGGREGATE_TYPE_P (type))
3216 || mode == TFmode)
3217 n_words = 1;
3218 else
3219 n_words = RS6000_ARG_SIZE (mode, type);
3221 /* Long long and SPE vectors are put in odd registers. */
3222 if (n_words == 2 && (gregno & 1) == 0)
3223 gregno += 1;
3225 /* Long long and SPE vectors are not split between registers
3226 and stack. */
3227 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3229 /* SPE vectors in ... get split into 2 registers. */
3230 if (TARGET_SPE && TARGET_SPE_ABI
3231 && SPE_VECTOR_MODE (mode) && !named)
3233 rtx r1, r2;
3234 enum machine_mode m = SImode;
3236 r1 = gen_rtx_REG (m, gregno);
3237 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3238 r2 = gen_rtx_REG (m, gregno + 1);
3239 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3240 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3242 return gen_rtx_REG (mode, gregno);
3244 else
3245 return NULL;
3248 else
3250 int align = (TARGET_32BIT && (cum->words & 1) != 0
3251 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3252 int align_words = cum->words + align;
3254 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3255 return NULL_RTX;
3257 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3259 if (! type
3260 || ((cum->nargs_prototype > 0)
3261 /* IBM AIX extended its linkage convention definition always
3262 to require FP args after register save area hole on the
3263 stack. */
3264 && (DEFAULT_ABI != ABI_AIX
3265 || ! TARGET_XL_CALL
3266 || (align_words < GP_ARG_NUM_REG))))
3267 return gen_rtx_REG (mode, cum->fregno);
3269 return gen_rtx_PARALLEL (mode,
3270 gen_rtvec (2,
3271 gen_rtx_EXPR_LIST (VOIDmode,
3272 ((align_words >= GP_ARG_NUM_REG)
3273 ? NULL_RTX
3274 : (align_words
3275 + RS6000_ARG_SIZE (mode, type)
3276 > GP_ARG_NUM_REG
3277 /* If this is partially on the stack, then
3278 we only include the portion actually
3279 in registers here. */
3280 ? gen_rtx_REG (SImode,
3281 GP_ARG_MIN_REG + align_words)
3282 : gen_rtx_REG (mode,
3283 GP_ARG_MIN_REG + align_words))),
3284 const0_rtx),
3285 gen_rtx_EXPR_LIST (VOIDmode,
3286 gen_rtx_REG (mode, cum->fregno),
3287 const0_rtx)));
3289 else if (align_words < GP_ARG_NUM_REG)
3290 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3291 else
3292 return NULL_RTX;
3296 /* For an arg passed partly in registers and partly in memory,
3297 this is the number of registers used.
3298 For args passed entirely in registers or entirely in memory, zero. */
3301 function_arg_partial_nregs (cum, mode, type, named)
3302 CUMULATIVE_ARGS *cum;
3303 enum machine_mode mode;
3304 tree type;
3305 int named ATTRIBUTE_UNUSED;
3307 if (DEFAULT_ABI == ABI_V4)
3308 return 0;
3310 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3311 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3313 if (cum->nargs_prototype >= 0)
3314 return 0;
3317 if (cum->words < GP_ARG_NUM_REG
3318 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3320 int ret = GP_ARG_NUM_REG - cum->words;
3321 if (ret && TARGET_DEBUG_ARG)
3322 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3324 return ret;
3327 return 0;
3330 /* A C expression that indicates when an argument must be passed by
3331 reference. If nonzero for an argument, a copy of that argument is
3332 made in memory and a pointer to the argument is passed instead of
3333 the argument itself. The pointer is passed in whatever way is
3334 appropriate for passing a pointer to that type.
3336 Under V.4, structures and unions are passed by reference.
3338 As an extension to all ABIs, variable sized types are passed by
3339 reference. */
3342 function_arg_pass_by_reference (cum, mode, type, named)
3343 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3344 enum machine_mode mode ATTRIBUTE_UNUSED;
3345 tree type;
3346 int named ATTRIBUTE_UNUSED;
3348 if (DEFAULT_ABI == ABI_V4
3349 && ((type && AGGREGATE_TYPE_P (type))
3350 || mode == TFmode))
3352 if (TARGET_DEBUG_ARG)
3353 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3355 return 1;
3357 return type && int_size_in_bytes (type) <= 0;
3360 /* Perform any needed actions needed for a function that is receiving a
3361 variable number of arguments.
3363 CUM is as above.
3365 MODE and TYPE are the mode and type of the current parameter.
3367 PRETEND_SIZE is a variable that should be set to the amount of stack
3368 that must be pushed by the prolog to pretend that our caller pushed
3371 Normally, this macro will push all remaining incoming registers on the
3372 stack and set PRETEND_SIZE to the length of the registers pushed. */
3374 void
3375 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3376 CUMULATIVE_ARGS *cum;
3377 enum machine_mode mode;
3378 tree type;
3379 int *pretend_size ATTRIBUTE_UNUSED;
3380 int no_rtl;
3383 CUMULATIVE_ARGS next_cum;
3384 int reg_size = TARGET_32BIT ? 4 : 8;
3385 rtx save_area = NULL_RTX, mem;
3386 int first_reg_offset, set;
3387 tree fntype;
3388 int stdarg_p;
3390 fntype = TREE_TYPE (current_function_decl);
3391 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3392 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3393 != void_type_node));
3395 /* For varargs, we do not want to skip the dummy va_dcl argument.
3396 For stdargs, we do want to skip the last named argument. */
3397 next_cum = *cum;
3398 if (stdarg_p)
3399 function_arg_advance (&next_cum, mode, type, 1);
3401 if (DEFAULT_ABI == ABI_V4)
3403 /* Indicate to allocate space on the stack for varargs save area. */
3404 cfun->machine->sysv_varargs_p = 1;
3405 if (! no_rtl)
3406 save_area = plus_constant (virtual_stack_vars_rtx,
3407 - RS6000_VARARGS_SIZE);
3409 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3411 else
3413 first_reg_offset = next_cum.words;
3414 save_area = virtual_incoming_args_rtx;
3415 cfun->machine->sysv_varargs_p = 0;
3417 if (MUST_PASS_IN_STACK (mode, type))
3418 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3421 set = get_varargs_alias_set ();
3422 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3424 mem = gen_rtx_MEM (BLKmode,
3425 plus_constant (save_area,
3426 first_reg_offset * reg_size)),
3427 set_mem_alias_set (mem, set);
3428 set_mem_align (mem, BITS_PER_WORD);
3430 move_block_from_reg
3431 (GP_ARG_MIN_REG + first_reg_offset, mem,
3432 GP_ARG_NUM_REG - first_reg_offset,
3433 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3436 /* Save FP registers if needed. */
3437 if (DEFAULT_ABI == ABI_V4
3438 && TARGET_HARD_FLOAT && TARGET_FPRS
3439 && ! no_rtl
3440 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3442 int fregno = next_cum.fregno;
3443 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3444 rtx lab = gen_label_rtx ();
3445 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3447 emit_jump_insn (gen_rtx_SET (VOIDmode,
3448 pc_rtx,
3449 gen_rtx_IF_THEN_ELSE (VOIDmode,
3450 gen_rtx_NE (VOIDmode, cr1,
3451 const0_rtx),
3452 gen_rtx_LABEL_REF (VOIDmode, lab),
3453 pc_rtx)));
3455 while (fregno <= FP_ARG_V4_MAX_REG)
3457 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3458 set_mem_alias_set (mem, set);
3459 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3460 fregno++;
3461 off += 8;
3464 emit_label (lab);
3468 /* Create the va_list data type. */
3470 tree
3471 rs6000_build_va_list ()
3473 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3475 /* For AIX, prefer 'char *' because that's what the system
3476 header files like. */
3477 if (DEFAULT_ABI != ABI_V4)
3478 return build_pointer_type (char_type_node);
3480 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3481 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3483 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3484 unsigned_char_type_node);
3485 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3486 unsigned_char_type_node);
3487 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3488 ptr_type_node);
3489 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3490 ptr_type_node);
3492 DECL_FIELD_CONTEXT (f_gpr) = record;
3493 DECL_FIELD_CONTEXT (f_fpr) = record;
3494 DECL_FIELD_CONTEXT (f_ovf) = record;
3495 DECL_FIELD_CONTEXT (f_sav) = record;
3497 TREE_CHAIN (record) = type_decl;
3498 TYPE_NAME (record) = type_decl;
3499 TYPE_FIELDS (record) = f_gpr;
3500 TREE_CHAIN (f_gpr) = f_fpr;
3501 TREE_CHAIN (f_fpr) = f_ovf;
3502 TREE_CHAIN (f_ovf) = f_sav;
3504 layout_type (record);
3506 /* The correct type is an array type of one element. */
3507 return build_array_type (record, build_index_type (size_zero_node));
3510 /* Implement va_start. */
3512 void
3513 rs6000_va_start (valist, nextarg)
3514 tree valist;
3515 rtx nextarg;
3517 HOST_WIDE_INT words, n_gpr, n_fpr;
3518 tree f_gpr, f_fpr, f_ovf, f_sav;
3519 tree gpr, fpr, ovf, sav, t;
3521 /* Only SVR4 needs something special. */
3522 if (DEFAULT_ABI != ABI_V4)
3524 std_expand_builtin_va_start (valist, nextarg);
3525 return;
3528 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3529 f_fpr = TREE_CHAIN (f_gpr);
3530 f_ovf = TREE_CHAIN (f_fpr);
3531 f_sav = TREE_CHAIN (f_ovf);
3533 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3534 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3535 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3536 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3537 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3539 /* Count number of gp and fp argument registers used. */
3540 words = current_function_args_info.words;
3541 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3542 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3544 if (TARGET_DEBUG_ARG)
3546 fputs ("va_start: words = ", stderr);
3547 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3548 fputs (", n_gpr = ", stderr);
3549 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3550 fputs (", n_fpr = ", stderr);
3551 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3552 putc ('\n', stderr);
3555 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3556 TREE_SIDE_EFFECTS (t) = 1;
3557 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3559 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3560 TREE_SIDE_EFFECTS (t) = 1;
3561 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3563 /* Find the overflow area. */
3564 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3565 if (words != 0)
3566 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3567 build_int_2 (words * UNITS_PER_WORD, 0));
3568 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3569 TREE_SIDE_EFFECTS (t) = 1;
3570 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3572 /* Find the register save area. */
3573 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3574 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3575 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3576 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3577 TREE_SIDE_EFFECTS (t) = 1;
3578 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 /* Implement va_arg. */
3584 rs6000_va_arg (valist, type)
3585 tree valist, type;
3587 tree f_gpr, f_fpr, f_ovf, f_sav;
3588 tree gpr, fpr, ovf, sav, reg, t, u;
3589 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3590 rtx lab_false, lab_over, addr_rtx, r;
3592 if (DEFAULT_ABI != ABI_V4)
3594 /* Variable sized types are passed by reference. */
3595 if (int_size_in_bytes (type) <= 0)
3597 u = build_pointer_type (type);
3599 /* Args grow upward. */
3600 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
3601 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
3602 TREE_SIDE_EFFECTS (t) = 1;
3604 t = build1 (NOP_EXPR, build_pointer_type (u), t);
3605 TREE_SIDE_EFFECTS (t) = 1;
3607 t = build1 (INDIRECT_REF, u, t);
3608 TREE_SIDE_EFFECTS (t) = 1;
3610 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3612 else
3613 return std_expand_builtin_va_arg (valist, type);
3616 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3617 f_fpr = TREE_CHAIN (f_gpr);
3618 f_ovf = TREE_CHAIN (f_fpr);
3619 f_sav = TREE_CHAIN (f_ovf);
3621 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3622 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3623 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3624 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3625 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3627 size = int_size_in_bytes (type);
3628 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3630 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3632 /* Aggregates and long doubles are passed by reference. */
3633 indirect_p = 1;
3634 reg = gpr;
3635 n_reg = 1;
3636 sav_ofs = 0;
3637 sav_scale = 4;
3638 size = UNITS_PER_WORD;
3639 rsize = 1;
3641 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3643 /* FP args go in FP registers, if present. */
3644 indirect_p = 0;
3645 reg = fpr;
3646 n_reg = 1;
3647 sav_ofs = 8*4;
3648 sav_scale = 8;
3650 else
3652 /* Otherwise into GP registers. */
3653 indirect_p = 0;
3654 reg = gpr;
3655 n_reg = rsize;
3656 sav_ofs = 0;
3657 sav_scale = 4;
3660 /* Pull the value out of the saved registers ... */
3662 lab_false = gen_label_rtx ();
3663 lab_over = gen_label_rtx ();
3664 addr_rtx = gen_reg_rtx (Pmode);
3666 /* AltiVec vectors never go in registers. */
3667 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3669 TREE_THIS_VOLATILE (reg) = 1;
3670 emit_cmp_and_jump_insns
3671 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3672 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3673 lab_false);
3675 /* Long long is aligned in the registers. */
3676 if (n_reg > 1)
3678 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3679 build_int_2 (n_reg - 1, 0));
3680 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3681 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3682 TREE_SIDE_EFFECTS (u) = 1;
3683 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3686 if (sav_ofs)
3687 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3688 else
3689 t = sav;
3691 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3692 build_int_2 (n_reg, 0));
3693 TREE_SIDE_EFFECTS (u) = 1;
3695 u = build1 (CONVERT_EXPR, integer_type_node, u);
3696 TREE_SIDE_EFFECTS (u) = 1;
3698 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3699 TREE_SIDE_EFFECTS (u) = 1;
3701 t = build (PLUS_EXPR, ptr_type_node, t, u);
3702 TREE_SIDE_EFFECTS (t) = 1;
3704 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3705 if (r != addr_rtx)
3706 emit_move_insn (addr_rtx, r);
3708 emit_jump_insn (gen_jump (lab_over));
3709 emit_barrier ();
3712 emit_label (lab_false);
3714 /* ... otherwise out of the overflow area. */
3716 /* Make sure we don't find reg 7 for the next int arg.
3718 All AltiVec vectors go in the overflow area. So in the AltiVec
3719 case we need to get the vectors from the overflow area, but
3720 remember where the GPRs and FPRs are. */
3721 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3722 || !TARGET_ALTIVEC))
3724 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3725 TREE_SIDE_EFFECTS (t) = 1;
3726 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3729 /* Care for on-stack alignment if needed. */
3730 if (rsize <= 1)
3731 t = ovf;
3732 else
3734 int align;
3736 /* AltiVec vectors are 16 byte aligned. */
3737 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3738 align = 15;
3739 else
3740 align = 7;
3742 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3743 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3745 t = save_expr (t);
3747 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3748 if (r != addr_rtx)
3749 emit_move_insn (addr_rtx, r);
3751 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3752 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3753 TREE_SIDE_EFFECTS (t) = 1;
3754 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3756 emit_label (lab_over);
3758 if (indirect_p)
3760 r = gen_rtx_MEM (Pmode, addr_rtx);
3761 set_mem_alias_set (r, get_varargs_alias_set ());
3762 emit_move_insn (addr_rtx, r);
3765 return addr_rtx;
3768 /* Builtins. */
3770 #define def_builtin(MASK, NAME, TYPE, CODE) \
3771 do { \
3772 if ((MASK) & target_flags) \
3773 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3774 NULL, NULL_TREE); \
3775 } while (0)
3777 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3779 static const struct builtin_description bdesc_3arg[] =
3781 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3782 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3783 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3784 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3785 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3786 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3787 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3788 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3789 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3790 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3791 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3792 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3793 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3794 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3795 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3796 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3797 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3798 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3799 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3800 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3801 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3802 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3803 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3806 /* DST operations: void foo (void *, const int, const char). */
3808 static const struct builtin_description bdesc_dst[] =
3810 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3811 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3812 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3813 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3816 /* Simple binary operations: VECc = foo (VECa, VECb). */
3818 static struct builtin_description bdesc_2arg[] =
3820 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3821 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3822 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3823 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3824 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3825 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3826 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3827 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3828 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3829 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3830 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3831 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3832 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3833 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3834 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3835 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3836 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3837 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3838 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3839 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3840 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3841 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3842 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3843 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3844 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3845 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3846 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3847 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3848 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3849 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3850 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3851 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3852 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3853 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3854 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3855 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3856 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3857 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3858 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3859 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3860 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3861 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3862 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3863 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3864 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3865 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3866 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3867 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3868 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3869 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3870 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3871 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3872 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3873 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3874 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3875 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3876 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3877 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3878 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3879 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3880 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3881 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3882 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3883 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3884 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3885 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3886 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3887 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3888 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3889 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3890 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3891 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3892 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3893 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3894 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3895 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3896 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3897 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3898 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3899 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3900 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3901 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3902 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3903 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3904 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3905 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3906 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3907 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3908 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3909 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3910 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3911 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3912 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3913 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3914 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3915 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3916 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3917 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3918 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3919 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3920 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3921 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3922 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3923 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3924 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3925 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3926 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3927 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3928 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3929 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3930 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3931 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3932 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3934 /* Place holder, leave as first spe builtin. */
3935 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3936 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3937 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3938 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3939 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3940 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3941 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3942 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3943 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3944 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3945 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3946 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3947 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3948 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3949 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3950 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3951 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3952 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3953 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3954 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3955 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3956 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3957 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3958 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3959 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3960 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3961 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3962 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3963 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3964 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3965 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3966 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3967 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3968 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3969 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3970 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3971 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3972 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3973 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3974 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3975 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3976 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3977 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3978 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3979 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3980 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3981 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3982 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3983 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3984 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3985 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3986 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3987 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3988 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3989 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3990 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3991 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3992 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3993 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3994 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3995 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3996 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3997 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3998 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3999 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4000 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4001 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4002 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4003 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4004 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4005 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4006 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4007 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4008 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4009 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4010 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4011 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4012 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4013 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4014 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4015 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4016 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4017 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4018 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4019 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4020 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4021 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4022 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4023 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4024 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4025 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4026 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4027 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4028 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4029 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4030 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4031 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4032 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4033 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4034 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4035 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4036 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4037 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4038 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4039 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4040 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4041 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4042 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4043 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4045 /* SPE binary operations expecting a 5-bit unsigned literal. */
4046 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4048 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4049 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4050 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4051 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4052 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4053 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4054 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4055 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4056 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4057 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4058 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4059 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4060 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4061 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4062 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4063 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4064 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4065 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4066 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4067 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4068 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4069 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4070 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4071 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4072 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4073 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4075 /* Place-holder. Leave as last binary SPE builtin. */
4076 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4079 /* AltiVec predicates. */
4081 struct builtin_description_predicates
4083 const unsigned int mask;
4084 const enum insn_code icode;
4085 const char *opcode;
4086 const char *const name;
4087 const enum rs6000_builtins code;
4090 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4092 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4093 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4094 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4095 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4096 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4097 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4098 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4099 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4100 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4101 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4102 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4103 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4104 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4107 /* SPE predicates. */
4108 static struct builtin_description bdesc_spe_predicates[] =
4110 /* Place-holder. Leave as first. */
4111 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4112 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4113 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4114 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4115 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4116 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4117 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4118 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4119 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4120 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4121 /* Place-holder. Leave as last. */
4122 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4125 /* SPE evsel predicates. */
4126 static struct builtin_description bdesc_spe_evsel[] =
4128 /* Place-holder. Leave as first. */
4129 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4130 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4131 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4132 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4133 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4134 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4135 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4136 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4137 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4138 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4139 /* Place-holder. Leave as last. */
4140 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4143 /* ABS* operations. */
4145 static const struct builtin_description bdesc_abs[] =
4147 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4148 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4149 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4150 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4151 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4152 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4153 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4156 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4157 foo (VECa). */
4159 static struct builtin_description bdesc_1arg[] =
4161 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4162 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4163 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4164 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4165 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4166 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4167 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4168 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4169 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4170 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4171 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4172 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4173 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4174 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4175 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4176 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4177 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4179 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4180 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4181 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4182 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4183 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4184 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4185 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4186 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4187 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4188 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4189 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4190 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4191 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4192 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4193 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4194 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4195 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4196 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4197 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4198 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4199 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4200 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4201 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4202 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4203 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4204 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4205 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4206 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4207 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4208 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4209 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4210 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4212 /* Place-holder. Leave as last unary SPE builtin. */
4213 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4216 static rtx
4217 rs6000_expand_unop_builtin (icode, arglist, target)
4218 enum insn_code icode;
4219 tree arglist;
4220 rtx target;
4222 rtx pat;
4223 tree arg0 = TREE_VALUE (arglist);
4224 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4225 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4226 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4228 if (icode == CODE_FOR_nothing)
4229 /* Builtin not supported on this processor. */
4230 return 0;
4232 /* If we got invalid arguments bail out before generating bad rtl. */
4233 if (arg0 == error_mark_node)
4234 return const0_rtx;
4236 if (icode == CODE_FOR_altivec_vspltisb
4237 || icode == CODE_FOR_altivec_vspltish
4238 || icode == CODE_FOR_altivec_vspltisw
4239 || icode == CODE_FOR_spe_evsplatfi
4240 || icode == CODE_FOR_spe_evsplati)
4242 /* Only allow 5-bit *signed* literals. */
4243 if (GET_CODE (op0) != CONST_INT
4244 || INTVAL (op0) > 0x1f
4245 || INTVAL (op0) < -0x1f)
4247 error ("argument 1 must be a 5-bit signed literal");
4248 return const0_rtx;
4252 if (target == 0
4253 || GET_MODE (target) != tmode
4254 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4255 target = gen_reg_rtx (tmode);
4257 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4258 op0 = copy_to_mode_reg (mode0, op0);
4260 pat = GEN_FCN (icode) (target, op0);
4261 if (! pat)
4262 return 0;
4263 emit_insn (pat);
4265 return target;
4268 static rtx
4269 altivec_expand_abs_builtin (icode, arglist, target)
4270 enum insn_code icode;
4271 tree arglist;
4272 rtx target;
4274 rtx pat, scratch1, scratch2;
4275 tree arg0 = TREE_VALUE (arglist);
4276 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4277 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4278 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4280 /* If we have invalid arguments, bail out before generating bad rtl. */
4281 if (arg0 == error_mark_node)
4282 return const0_rtx;
4284 if (target == 0
4285 || GET_MODE (target) != tmode
4286 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4287 target = gen_reg_rtx (tmode);
4289 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4290 op0 = copy_to_mode_reg (mode0, op0);
4292 scratch1 = gen_reg_rtx (mode0);
4293 scratch2 = gen_reg_rtx (mode0);
4295 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4296 if (! pat)
4297 return 0;
4298 emit_insn (pat);
4300 return target;
4303 static rtx
4304 rs6000_expand_binop_builtin (icode, arglist, target)
4305 enum insn_code icode;
4306 tree arglist;
4307 rtx target;
4309 rtx pat;
4310 tree arg0 = TREE_VALUE (arglist);
4311 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4312 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4313 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4314 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4315 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4316 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4318 if (icode == CODE_FOR_nothing)
4319 /* Builtin not supported on this processor. */
4320 return 0;
4322 /* If we got invalid arguments bail out before generating bad rtl. */
4323 if (arg0 == error_mark_node || arg1 == error_mark_node)
4324 return const0_rtx;
4326 if (icode == CODE_FOR_altivec_vcfux
4327 || icode == CODE_FOR_altivec_vcfsx
4328 || icode == CODE_FOR_altivec_vctsxs
4329 || icode == CODE_FOR_altivec_vctuxs
4330 || icode == CODE_FOR_altivec_vspltb
4331 || icode == CODE_FOR_altivec_vsplth
4332 || icode == CODE_FOR_altivec_vspltw
4333 || icode == CODE_FOR_spe_evaddiw
4334 || icode == CODE_FOR_spe_evldd
4335 || icode == CODE_FOR_spe_evldh
4336 || icode == CODE_FOR_spe_evldw
4337 || icode == CODE_FOR_spe_evlhhesplat
4338 || icode == CODE_FOR_spe_evlhhossplat
4339 || icode == CODE_FOR_spe_evlhhousplat
4340 || icode == CODE_FOR_spe_evlwhe
4341 || icode == CODE_FOR_spe_evlwhos
4342 || icode == CODE_FOR_spe_evlwhou
4343 || icode == CODE_FOR_spe_evlwhsplat
4344 || icode == CODE_FOR_spe_evlwwsplat
4345 || icode == CODE_FOR_spe_evrlwi
4346 || icode == CODE_FOR_spe_evslwi
4347 || icode == CODE_FOR_spe_evsrwis
4348 || icode == CODE_FOR_spe_evsrwiu)
4350 /* Only allow 5-bit unsigned literals. */
4351 if (TREE_CODE (arg1) != INTEGER_CST
4352 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4354 error ("argument 2 must be a 5-bit unsigned literal");
4355 return const0_rtx;
4359 if (target == 0
4360 || GET_MODE (target) != tmode
4361 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4362 target = gen_reg_rtx (tmode);
4364 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4365 op0 = copy_to_mode_reg (mode0, op0);
4366 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4367 op1 = copy_to_mode_reg (mode1, op1);
4369 pat = GEN_FCN (icode) (target, op0, op1);
4370 if (! pat)
4371 return 0;
4372 emit_insn (pat);
4374 return target;
4377 static rtx
4378 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4379 enum insn_code icode;
4380 const char *opcode;
4381 tree arglist;
4382 rtx target;
4384 rtx pat, scratch;
4385 tree cr6_form = TREE_VALUE (arglist);
4386 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4387 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4388 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4389 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4390 enum machine_mode tmode = SImode;
4391 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4392 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4393 int cr6_form_int;
4395 if (TREE_CODE (cr6_form) != INTEGER_CST)
4397 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4398 return const0_rtx;
4400 else
4401 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4403 if (mode0 != mode1)
4404 abort ();
4406 /* If we have invalid arguments, bail out before generating bad rtl. */
4407 if (arg0 == error_mark_node || arg1 == error_mark_node)
4408 return const0_rtx;
4410 if (target == 0
4411 || GET_MODE (target) != tmode
4412 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4413 target = gen_reg_rtx (tmode);
4415 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4416 op0 = copy_to_mode_reg (mode0, op0);
4417 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4418 op1 = copy_to_mode_reg (mode1, op1);
4420 scratch = gen_reg_rtx (mode0);
4422 pat = GEN_FCN (icode) (scratch, op0, op1,
4423 gen_rtx (SYMBOL_REF, Pmode, opcode));
4424 if (! pat)
4425 return 0;
4426 emit_insn (pat);
4428 /* The vec_any* and vec_all* predicates use the same opcodes for two
4429 different operations, but the bits in CR6 will be different
4430 depending on what information we want. So we have to play tricks
4431 with CR6 to get the right bits out.
4433 If you think this is disgusting, look at the specs for the
4434 AltiVec predicates. */
4436 switch (cr6_form_int)
4438 case 0:
4439 emit_insn (gen_cr6_test_for_zero (target));
4440 break;
4441 case 1:
4442 emit_insn (gen_cr6_test_for_zero_reverse (target));
4443 break;
4444 case 2:
4445 emit_insn (gen_cr6_test_for_lt (target));
4446 break;
4447 case 3:
4448 emit_insn (gen_cr6_test_for_lt_reverse (target));
4449 break;
4450 default:
4451 error ("argument 1 of __builtin_altivec_predicate is out of range");
4452 break;
4455 return target;
4458 static rtx
4459 altivec_expand_stv_builtin (icode, arglist)
4460 enum insn_code icode;
4461 tree arglist;
4463 tree arg0 = TREE_VALUE (arglist);
4464 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4465 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4466 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4467 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4468 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4469 rtx pat;
4470 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4471 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4472 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4474 /* Invalid arguments. Bail before doing anything stoopid! */
4475 if (arg0 == error_mark_node
4476 || arg1 == error_mark_node
4477 || arg2 == error_mark_node)
4478 return const0_rtx;
4480 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4481 op0 = copy_to_mode_reg (mode2, op0);
4482 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4483 op1 = copy_to_mode_reg (mode0, op1);
4484 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4485 op2 = copy_to_mode_reg (mode1, op2);
4487 pat = GEN_FCN (icode) (op1, op2, op0);
4488 if (pat)
4489 emit_insn (pat);
4490 return NULL_RTX;
4493 static rtx
4494 rs6000_expand_ternop_builtin (icode, arglist, target)
4495 enum insn_code icode;
4496 tree arglist;
4497 rtx target;
4499 rtx pat;
4500 tree arg0 = TREE_VALUE (arglist);
4501 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4502 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4503 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4504 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4505 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4506 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4507 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4508 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4509 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4511 if (icode == CODE_FOR_nothing)
4512 /* Builtin not supported on this processor. */
4513 return 0;
4515 /* If we got invalid arguments bail out before generating bad rtl. */
4516 if (arg0 == error_mark_node
4517 || arg1 == error_mark_node
4518 || arg2 == error_mark_node)
4519 return const0_rtx;
4521 if (icode == CODE_FOR_altivec_vsldoi_4sf
4522 || icode == CODE_FOR_altivec_vsldoi_4si
4523 || icode == CODE_FOR_altivec_vsldoi_8hi
4524 || icode == CODE_FOR_altivec_vsldoi_16qi)
4526 /* Only allow 4-bit unsigned literals. */
4527 if (TREE_CODE (arg2) != INTEGER_CST
4528 || TREE_INT_CST_LOW (arg2) & ~0xf)
4530 error ("argument 3 must be a 4-bit unsigned literal");
4531 return const0_rtx;
4535 if (target == 0
4536 || GET_MODE (target) != tmode
4537 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4538 target = gen_reg_rtx (tmode);
4540 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4541 op0 = copy_to_mode_reg (mode0, op0);
4542 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4543 op1 = copy_to_mode_reg (mode1, op1);
4544 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4545 op2 = copy_to_mode_reg (mode2, op2);
4547 pat = GEN_FCN (icode) (target, op0, op1, op2);
4548 if (! pat)
4549 return 0;
4550 emit_insn (pat);
4552 return target;
4555 /* Expand the lvx builtins. */
4556 static rtx
4557 altivec_expand_ld_builtin (exp, target, expandedp)
4558 tree exp;
4559 rtx target;
4560 bool *expandedp;
4562 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4563 tree arglist = TREE_OPERAND (exp, 1);
4564 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4565 tree arg0;
4566 enum machine_mode tmode, mode0;
4567 rtx pat, op0;
4568 enum insn_code icode;
4570 switch (fcode)
4572 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4573 icode = CODE_FOR_altivec_lvx_16qi;
4574 break;
4575 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4576 icode = CODE_FOR_altivec_lvx_8hi;
4577 break;
4578 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4579 icode = CODE_FOR_altivec_lvx_4si;
4580 break;
4581 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4582 icode = CODE_FOR_altivec_lvx_4sf;
4583 break;
4584 default:
4585 *expandedp = false;
4586 return NULL_RTX;
4589 *expandedp = true;
4591 arg0 = TREE_VALUE (arglist);
4592 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4593 tmode = insn_data[icode].operand[0].mode;
4594 mode0 = insn_data[icode].operand[1].mode;
4596 if (target == 0
4597 || GET_MODE (target) != tmode
4598 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4599 target = gen_reg_rtx (tmode);
4601 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4602 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4604 pat = GEN_FCN (icode) (target, op0);
4605 if (! pat)
4606 return 0;
4607 emit_insn (pat);
4608 return target;
4611 /* Expand the stvx builtins. */
4612 static rtx
4613 altivec_expand_st_builtin (exp, target, expandedp)
4614 tree exp;
4615 rtx target ATTRIBUTE_UNUSED;
4616 bool *expandedp;
4618 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4619 tree arglist = TREE_OPERAND (exp, 1);
4620 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4621 tree arg0, arg1;
4622 enum machine_mode mode0, mode1;
4623 rtx pat, op0, op1;
4624 enum insn_code icode;
4626 switch (fcode)
4628 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4629 icode = CODE_FOR_altivec_stvx_16qi;
4630 break;
4631 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4632 icode = CODE_FOR_altivec_stvx_8hi;
4633 break;
4634 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4635 icode = CODE_FOR_altivec_stvx_4si;
4636 break;
4637 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4638 icode = CODE_FOR_altivec_stvx_4sf;
4639 break;
4640 default:
4641 *expandedp = false;
4642 return NULL_RTX;
4645 arg0 = TREE_VALUE (arglist);
4646 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4647 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4648 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4649 mode0 = insn_data[icode].operand[0].mode;
4650 mode1 = insn_data[icode].operand[1].mode;
4652 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4653 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4654 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4655 op1 = copy_to_mode_reg (mode1, op1);
4657 pat = GEN_FCN (icode) (op0, op1);
4658 if (pat)
4659 emit_insn (pat);
4661 *expandedp = true;
4662 return NULL_RTX;
4665 /* Expand the dst builtins. */
4666 static rtx
4667 altivec_expand_dst_builtin (exp, target, expandedp)
4668 tree exp;
4669 rtx target ATTRIBUTE_UNUSED;
4670 bool *expandedp;
4672 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4673 tree arglist = TREE_OPERAND (exp, 1);
4674 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4675 tree arg0, arg1, arg2;
4676 enum machine_mode mode0, mode1, mode2;
4677 rtx pat, op0, op1, op2;
4678 struct builtin_description *d;
4679 size_t i;
4681 *expandedp = false;
4683 /* Handle DST variants. */
4684 d = (struct builtin_description *) bdesc_dst;
4685 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4686 if (d->code == fcode)
4688 arg0 = TREE_VALUE (arglist);
4689 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4690 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4691 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4692 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4693 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4694 mode0 = insn_data[d->icode].operand[0].mode;
4695 mode1 = insn_data[d->icode].operand[1].mode;
4696 mode2 = insn_data[d->icode].operand[2].mode;
4698 /* Invalid arguments, bail out before generating bad rtl. */
4699 if (arg0 == error_mark_node
4700 || arg1 == error_mark_node
4701 || arg2 == error_mark_node)
4702 return const0_rtx;
4704 if (TREE_CODE (arg2) != INTEGER_CST
4705 || TREE_INT_CST_LOW (arg2) & ~0x3)
4707 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4708 return const0_rtx;
4711 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4712 op0 = copy_to_mode_reg (mode0, op0);
4713 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4714 op1 = copy_to_mode_reg (mode1, op1);
4716 pat = GEN_FCN (d->icode) (op0, op1, op2);
4717 if (pat != 0)
4718 emit_insn (pat);
4720 *expandedp = true;
4721 return NULL_RTX;
4724 return NULL_RTX;
4727 /* Expand the builtin in EXP and store the result in TARGET. Store
4728 true in *EXPANDEDP if we found a builtin to expand. */
4729 static rtx
4730 altivec_expand_builtin (exp, target, expandedp)
4731 tree exp;
4732 rtx target;
4733 bool *expandedp;
4735 struct builtin_description *d;
4736 struct builtin_description_predicates *dp;
4737 size_t i;
4738 enum insn_code icode;
4739 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4740 tree arglist = TREE_OPERAND (exp, 1);
4741 tree arg0;
4742 rtx op0, pat;
4743 enum machine_mode tmode, mode0;
4744 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4746 target = altivec_expand_ld_builtin (exp, target, expandedp);
4747 if (*expandedp)
4748 return target;
4750 target = altivec_expand_st_builtin (exp, target, expandedp);
4751 if (*expandedp)
4752 return target;
4754 target = altivec_expand_dst_builtin (exp, target, expandedp);
4755 if (*expandedp)
4756 return target;
4758 *expandedp = true;
4760 switch (fcode)
4762 case ALTIVEC_BUILTIN_STVX:
4763 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4764 case ALTIVEC_BUILTIN_STVEBX:
4765 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4766 case ALTIVEC_BUILTIN_STVEHX:
4767 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4768 case ALTIVEC_BUILTIN_STVEWX:
4769 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4770 case ALTIVEC_BUILTIN_STVXL:
4771 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4773 case ALTIVEC_BUILTIN_MFVSCR:
4774 icode = CODE_FOR_altivec_mfvscr;
4775 tmode = insn_data[icode].operand[0].mode;
4777 if (target == 0
4778 || GET_MODE (target) != tmode
4779 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4780 target = gen_reg_rtx (tmode);
4782 pat = GEN_FCN (icode) (target);
4783 if (! pat)
4784 return 0;
4785 emit_insn (pat);
4786 return target;
4788 case ALTIVEC_BUILTIN_MTVSCR:
4789 icode = CODE_FOR_altivec_mtvscr;
4790 arg0 = TREE_VALUE (arglist);
4791 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4792 mode0 = insn_data[icode].operand[0].mode;
4794 /* If we got invalid arguments bail out before generating bad rtl. */
4795 if (arg0 == error_mark_node)
4796 return const0_rtx;
4798 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4799 op0 = copy_to_mode_reg (mode0, op0);
4801 pat = GEN_FCN (icode) (op0);
4802 if (pat)
4803 emit_insn (pat);
4804 return NULL_RTX;
4806 case ALTIVEC_BUILTIN_DSSALL:
4807 emit_insn (gen_altivec_dssall ());
4808 return NULL_RTX;
4810 case ALTIVEC_BUILTIN_DSS:
4811 icode = CODE_FOR_altivec_dss;
4812 arg0 = TREE_VALUE (arglist);
4813 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4814 mode0 = insn_data[icode].operand[0].mode;
4816 /* If we got invalid arguments bail out before generating bad rtl. */
4817 if (arg0 == error_mark_node)
4818 return const0_rtx;
4820 if (TREE_CODE (arg0) != INTEGER_CST
4821 || TREE_INT_CST_LOW (arg0) & ~0x3)
4823 error ("argument to dss must be a 2-bit unsigned literal");
4824 return const0_rtx;
4827 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4828 op0 = copy_to_mode_reg (mode0, op0);
4830 emit_insn (gen_altivec_dss (op0));
4831 return NULL_RTX;
4834 /* Expand abs* operations. */
4835 d = (struct builtin_description *) bdesc_abs;
4836 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4837 if (d->code == fcode)
4838 return altivec_expand_abs_builtin (d->icode, arglist, target);
4840 /* Expand the AltiVec predicates. */
4841 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4842 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4843 if (dp->code == fcode)
4844 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4846 /* LV* are funky. We initialized them differently. */
4847 switch (fcode)
4849 case ALTIVEC_BUILTIN_LVSL:
4850 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4851 arglist, target);
4852 case ALTIVEC_BUILTIN_LVSR:
4853 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4854 arglist, target);
4855 case ALTIVEC_BUILTIN_LVEBX:
4856 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4857 arglist, target);
4858 case ALTIVEC_BUILTIN_LVEHX:
4859 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4860 arglist, target);
4861 case ALTIVEC_BUILTIN_LVEWX:
4862 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4863 arglist, target);
4864 case ALTIVEC_BUILTIN_LVXL:
4865 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4866 arglist, target);
4867 case ALTIVEC_BUILTIN_LVX:
4868 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4869 arglist, target);
4870 default:
4871 break;
4872 /* Fall through. */
4875 *expandedp = false;
4876 return NULL_RTX;
4879 /* Binops that need to be initialized manually, but can be expanded
4880 automagically by rs6000_expand_binop_builtin. */
4881 static struct builtin_description bdesc_2arg_spe[] =
4883 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
4884 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
4885 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
4886 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
4887 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
4888 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
4889 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
4890 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
4891 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
4892 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
4893 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
4894 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
4895 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
4896 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
4897 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
4898 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
4899 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
4900 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
4901 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
4902 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
4903 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
4904 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
4907 /* Expand the builtin in EXP and store the result in TARGET. Store
4908 true in *EXPANDEDP if we found a builtin to expand.
4910 This expands the SPE builtins that are not simple unary and binary
4911 operations. */
4912 static rtx
4913 spe_expand_builtin (exp, target, expandedp)
4914 tree exp;
4915 rtx target;
4916 bool *expandedp;
4918 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4919 tree arglist = TREE_OPERAND (exp, 1);
4920 tree arg1, arg0;
4921 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4922 enum insn_code icode;
4923 enum machine_mode tmode, mode0;
4924 rtx pat, op0;
4925 struct builtin_description *d;
4926 size_t i;
4928 *expandedp = true;
4930 /* Syntax check for a 5-bit unsigned immediate. */
4931 switch (fcode)
4933 case SPE_BUILTIN_EVSTDD:
4934 case SPE_BUILTIN_EVSTDH:
4935 case SPE_BUILTIN_EVSTDW:
4936 case SPE_BUILTIN_EVSTWHE:
4937 case SPE_BUILTIN_EVSTWHO:
4938 case SPE_BUILTIN_EVSTWWE:
4939 case SPE_BUILTIN_EVSTWWO:
4940 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4941 if (TREE_CODE (arg1) != INTEGER_CST
4942 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4944 error ("argument 2 must be a 5-bit unsigned literal");
4945 return const0_rtx;
4947 break;
4948 default:
4949 break;
4952 d = (struct builtin_description *) bdesc_2arg_spe;
4953 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
4954 if (d->code == fcode)
4955 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4957 d = (struct builtin_description *) bdesc_spe_predicates;
4958 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
4959 if (d->code == fcode)
4960 return spe_expand_predicate_builtin (d->icode, arglist, target);
4962 d = (struct builtin_description *) bdesc_spe_evsel;
4963 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
4964 if (d->code == fcode)
4965 return spe_expand_evsel_builtin (d->icode, arglist, target);
4967 switch (fcode)
4969 case SPE_BUILTIN_EVSTDDX:
4970 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
4971 case SPE_BUILTIN_EVSTDHX:
4972 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
4973 case SPE_BUILTIN_EVSTDWX:
4974 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
4975 case SPE_BUILTIN_EVSTWHEX:
4976 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
4977 case SPE_BUILTIN_EVSTWHOX:
4978 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
4979 case SPE_BUILTIN_EVSTWWEX:
4980 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
4981 case SPE_BUILTIN_EVSTWWOX:
4982 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
4983 case SPE_BUILTIN_EVSTDD:
4984 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
4985 case SPE_BUILTIN_EVSTDH:
4986 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
4987 case SPE_BUILTIN_EVSTDW:
4988 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
4989 case SPE_BUILTIN_EVSTWHE:
4990 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
4991 case SPE_BUILTIN_EVSTWHO:
4992 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
4993 case SPE_BUILTIN_EVSTWWE:
4994 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
4995 case SPE_BUILTIN_EVSTWWO:
4996 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
4997 case SPE_BUILTIN_MFSPEFSCR:
4998 icode = CODE_FOR_spe_mfspefscr;
4999 tmode = insn_data[icode].operand[0].mode;
5001 if (target == 0
5002 || GET_MODE (target) != tmode
5003 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5004 target = gen_reg_rtx (tmode);
5006 pat = GEN_FCN (icode) (target);
5007 if (! pat)
5008 return 0;
5009 emit_insn (pat);
5010 return target;
5011 case SPE_BUILTIN_MTSPEFSCR:
5012 icode = CODE_FOR_spe_mtspefscr;
5013 arg0 = TREE_VALUE (arglist);
5014 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5015 mode0 = insn_data[icode].operand[0].mode;
5017 if (arg0 == error_mark_node)
5018 return const0_rtx;
5020 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5021 op0 = copy_to_mode_reg (mode0, op0);
5023 pat = GEN_FCN (icode) (op0);
5024 if (pat)
5025 emit_insn (pat);
5026 return NULL_RTX;
5027 default:
5028 break;
5031 *expandedp = false;
5032 return NULL_RTX;
5035 static rtx
5036 spe_expand_predicate_builtin (icode, arglist, target)
5037 enum insn_code icode;
5038 tree arglist;
5039 rtx target;
5041 rtx pat, scratch, tmp;
5042 tree form = TREE_VALUE (arglist);
5043 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5044 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5045 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5046 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5047 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5048 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5049 int form_int;
5050 enum rtx_code code;
5052 if (TREE_CODE (form) != INTEGER_CST)
5054 error ("argument 1 of __builtin_spe_predicate must be a constant");
5055 return const0_rtx;
5057 else
5058 form_int = TREE_INT_CST_LOW (form);
5060 if (mode0 != mode1)
5061 abort ();
5063 if (arg0 == error_mark_node || arg1 == error_mark_node)
5064 return const0_rtx;
5066 if (target == 0
5067 || GET_MODE (target) != SImode
5068 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5069 target = gen_reg_rtx (SImode);
5071 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5072 op0 = copy_to_mode_reg (mode0, op0);
5073 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5074 op1 = copy_to_mode_reg (mode1, op1);
5076 scratch = gen_reg_rtx (CCmode);
5078 pat = GEN_FCN (icode) (scratch, op0, op1);
5079 if (! pat)
5080 return const0_rtx;
5081 emit_insn (pat);
5083 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5084 _lower_. We use one compare, but look in different bits of the
5085 CR for each variant.
5087 There are 2 elements in each SPE simd type (upper/lower). The CR
5088 bits are set as follows:
5090 BIT0 | BIT 1 | BIT 2 | BIT 3
5091 U | L | (U | L) | (U & L)
5093 So, for an "all" relationship, BIT 3 would be set.
5094 For an "any" relationship, BIT 2 would be set. Etc.
5096 Following traditional nomenclature, these bits map to:
5098 BIT0 | BIT 1 | BIT 2 | BIT 3
5099 LT | GT | EQ | OV
5101 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5104 switch (form_int)
5106 /* All variant. OV bit. */
5107 case 0:
5108 /* We need to get to the OV bit, which is the ORDERED bit. We
5109 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5110 that's ugly and will trigger a validate_condition_mode abort.
5111 So let's just use another pattern. */
5112 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5113 return target;
5114 /* Any variant. EQ bit. */
5115 case 1:
5116 code = EQ;
5117 break;
5118 /* Upper variant. LT bit. */
5119 case 2:
5120 code = LT;
5121 break;
5122 /* Lower variant. GT bit. */
5123 case 3:
5124 code = GT;
5125 break;
5126 default:
5127 error ("argument 1 of __builtin_spe_predicate is out of range");
5128 return const0_rtx;
5131 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5132 emit_move_insn (target, tmp);
5134 return target;
5137 /* The evsel builtins look like this:
5139 e = __builtin_spe_evsel_OP (a, b, c, d);
5141 and work like this:
5143 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5144 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5147 static rtx
5148 spe_expand_evsel_builtin (icode, arglist, target)
5149 enum insn_code icode;
5150 tree arglist;
5151 rtx target;
5153 rtx pat, scratch;
5154 tree arg0 = TREE_VALUE (arglist);
5155 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5156 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5157 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5158 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5159 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5160 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5161 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5162 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5163 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5165 if (mode0 != mode1)
5166 abort ();
5168 if (arg0 == error_mark_node || arg1 == error_mark_node
5169 || arg2 == error_mark_node || arg3 == error_mark_node)
5170 return const0_rtx;
5172 if (target == 0
5173 || GET_MODE (target) != mode0
5174 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5175 target = gen_reg_rtx (mode0);
5177 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5178 op0 = copy_to_mode_reg (mode0, op0);
5179 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5180 op1 = copy_to_mode_reg (mode0, op1);
5181 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5182 op2 = copy_to_mode_reg (mode0, op2);
5183 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5184 op3 = copy_to_mode_reg (mode0, op3);
5186 /* Generate the compare. */
5187 scratch = gen_reg_rtx (CCmode);
5188 pat = GEN_FCN (icode) (scratch, op0, op1);
5189 if (! pat)
5190 return const0_rtx;
5191 emit_insn (pat);
5193 if (mode0 == V2SImode)
5194 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5195 else
5196 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5198 return target;
5201 /* Expand an expression EXP that calls a built-in function,
5202 with result going to TARGET if that's convenient
5203 (and in mode MODE if that's convenient).
5204 SUBTARGET may be used as the target for computing one of EXP's operands.
5205 IGNORE is nonzero if the value is to be ignored. */
5207 static rtx
5208 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5209 tree exp;
5210 rtx target;
5211 rtx subtarget ATTRIBUTE_UNUSED;
5212 enum machine_mode mode ATTRIBUTE_UNUSED;
5213 int ignore ATTRIBUTE_UNUSED;
5215 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5216 tree arglist = TREE_OPERAND (exp, 1);
5217 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5218 struct builtin_description *d;
5219 size_t i;
5220 rtx ret;
5221 bool success;
5223 if (TARGET_ALTIVEC)
5225 ret = altivec_expand_builtin (exp, target, &success);
5227 if (success)
5228 return ret;
5230 if (TARGET_SPE)
5232 ret = spe_expand_builtin (exp, target, &success);
5234 if (success)
5235 return ret;
5238 if (TARGET_ALTIVEC || TARGET_SPE)
5240 /* Handle simple unary operations. */
5241 d = (struct builtin_description *) bdesc_1arg;
5242 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5243 if (d->code == fcode)
5244 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5246 /* Handle simple binary operations. */
5247 d = (struct builtin_description *) bdesc_2arg;
5248 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5249 if (d->code == fcode)
5250 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5252 /* Handle simple ternary operations. */
5253 d = (struct builtin_description *) bdesc_3arg;
5254 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5255 if (d->code == fcode)
5256 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5259 abort ();
5260 return NULL_RTX;
5263 static void
5264 rs6000_init_builtins ()
5266 if (TARGET_SPE)
5267 spe_init_builtins ();
5268 if (TARGET_ALTIVEC)
5269 altivec_init_builtins ();
5270 if (TARGET_ALTIVEC || TARGET_SPE)
5271 rs6000_common_init_builtins ();
5274 /* Search through a set of builtins and enable the mask bits.
5275 DESC is an array of builtins.
5276 SIZE is the total number of builtins.
5277 START is the builtin enum at which to start.
5278 END is the builtin enum at which to end. */
5279 static void
5280 enable_mask_for_builtins (desc, size, start, end)
5281 struct builtin_description *desc;
5282 int size;
5283 enum rs6000_builtins start, end;
5285 int i;
5287 for (i = 0; i < size; ++i)
5288 if (desc[i].code == start)
5289 break;
5291 if (i == size)
5292 return;
5294 for (; i < size; ++i)
5296 /* Flip all the bits on. */
5297 desc[i].mask = target_flags;
5298 if (desc[i].code == end)
5299 break;
5303 static void
5304 spe_init_builtins ()
5306 tree endlink = void_list_node;
5307 tree puint_type_node = build_pointer_type (unsigned_type_node);
5308 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5309 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5310 struct builtin_description *d;
5311 size_t i;
5313 tree v2si_ftype_4_v2si
5314 = build_function_type
5315 (V2SI_type_node,
5316 tree_cons (NULL_TREE, V2SI_type_node,
5317 tree_cons (NULL_TREE, V2SI_type_node,
5318 tree_cons (NULL_TREE, V2SI_type_node,
5319 tree_cons (NULL_TREE, V2SI_type_node,
5320 endlink)))));
5322 tree v2sf_ftype_4_v2sf
5323 = build_function_type
5324 (V2SF_type_node,
5325 tree_cons (NULL_TREE, V2SF_type_node,
5326 tree_cons (NULL_TREE, V2SF_type_node,
5327 tree_cons (NULL_TREE, V2SF_type_node,
5328 tree_cons (NULL_TREE, V2SF_type_node,
5329 endlink)))));
5331 tree int_ftype_int_v2si_v2si
5332 = build_function_type
5333 (integer_type_node,
5334 tree_cons (NULL_TREE, integer_type_node,
5335 tree_cons (NULL_TREE, V2SI_type_node,
5336 tree_cons (NULL_TREE, V2SI_type_node,
5337 endlink))));
5339 tree int_ftype_int_v2sf_v2sf
5340 = build_function_type
5341 (integer_type_node,
5342 tree_cons (NULL_TREE, integer_type_node,
5343 tree_cons (NULL_TREE, V2SF_type_node,
5344 tree_cons (NULL_TREE, V2SF_type_node,
5345 endlink))));
5347 tree void_ftype_v2si_puint_int
5348 = build_function_type (void_type_node,
5349 tree_cons (NULL_TREE, V2SI_type_node,
5350 tree_cons (NULL_TREE, puint_type_node,
5351 tree_cons (NULL_TREE,
5352 integer_type_node,
5353 endlink))));
5355 tree void_ftype_v2si_puint_char
5356 = build_function_type (void_type_node,
5357 tree_cons (NULL_TREE, V2SI_type_node,
5358 tree_cons (NULL_TREE, puint_type_node,
5359 tree_cons (NULL_TREE,
5360 char_type_node,
5361 endlink))));
5363 tree void_ftype_v2si_pv2si_int
5364 = build_function_type (void_type_node,
5365 tree_cons (NULL_TREE, V2SI_type_node,
5366 tree_cons (NULL_TREE, pv2si_type_node,
5367 tree_cons (NULL_TREE,
5368 integer_type_node,
5369 endlink))));
5371 tree void_ftype_v2si_pv2si_char
5372 = build_function_type (void_type_node,
5373 tree_cons (NULL_TREE, V2SI_type_node,
5374 tree_cons (NULL_TREE, pv2si_type_node,
5375 tree_cons (NULL_TREE,
5376 char_type_node,
5377 endlink))));
5379 tree void_ftype_int
5380 = build_function_type (void_type_node,
5381 tree_cons (NULL_TREE, integer_type_node, endlink));
5383 tree int_ftype_void
5384 = build_function_type (integer_type_node,
5385 tree_cons (NULL_TREE, void_type_node, endlink));
5387 tree v2si_ftype_pv2si_int
5388 = build_function_type (V2SI_type_node,
5389 tree_cons (NULL_TREE, pv2si_type_node,
5390 tree_cons (NULL_TREE, integer_type_node,
5391 endlink)));
5393 tree v2si_ftype_puint_int
5394 = build_function_type (V2SI_type_node,
5395 tree_cons (NULL_TREE, puint_type_node,
5396 tree_cons (NULL_TREE, integer_type_node,
5397 endlink)));
5399 tree v2si_ftype_pushort_int
5400 = build_function_type (V2SI_type_node,
5401 tree_cons (NULL_TREE, pushort_type_node,
5402 tree_cons (NULL_TREE, integer_type_node,
5403 endlink)));
5405 /* The initialization of the simple binary and unary builtins is
5406 done in rs6000_common_init_builtins, but we have to enable the
5407 mask bits here manually because we have run out of `target_flags'
5408 bits. We really need to redesign this mask business. */
5410 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5411 ARRAY_SIZE (bdesc_2arg),
5412 SPE_BUILTIN_EVADDW,
5413 SPE_BUILTIN_EVXOR);
5414 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5415 ARRAY_SIZE (bdesc_1arg),
5416 SPE_BUILTIN_EVABS,
5417 SPE_BUILTIN_EVSUBFUSIAAW);
5418 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5419 ARRAY_SIZE (bdesc_spe_predicates),
5420 SPE_BUILTIN_EVCMPEQ,
5421 SPE_BUILTIN_EVFSTSTLT);
5422 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5423 ARRAY_SIZE (bdesc_spe_evsel),
5424 SPE_BUILTIN_EVSEL_CMPGTS,
5425 SPE_BUILTIN_EVSEL_FSTSTEQ);
5427 /* Initialize irregular SPE builtins. */
5429 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5430 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5431 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5432 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5433 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5434 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5435 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5436 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5437 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5438 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5439 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5440 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5441 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5442 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5443 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5444 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5446 /* Loads. */
5447 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5448 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5449 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5450 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5451 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5452 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5453 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5454 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5455 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5456 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5457 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5458 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5459 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5460 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5461 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5462 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5463 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5464 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5465 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5466 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5467 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5468 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5470 /* Predicates. */
5471 d = (struct builtin_description *) bdesc_spe_predicates;
5472 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5474 tree type;
5476 switch (insn_data[d->icode].operand[1].mode)
5478 case V2SImode:
5479 type = int_ftype_int_v2si_v2si;
5480 break;
5481 case V2SFmode:
5482 type = int_ftype_int_v2sf_v2sf;
5483 break;
5484 default:
5485 abort ();
5488 def_builtin (d->mask, d->name, type, d->code);
5491 /* Evsel predicates. */
5492 d = (struct builtin_description *) bdesc_spe_evsel;
5493 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5495 tree type;
5497 switch (insn_data[d->icode].operand[1].mode)
5499 case V2SImode:
5500 type = v2si_ftype_4_v2si;
5501 break;
5502 case V2SFmode:
5503 type = v2sf_ftype_4_v2sf;
5504 break;
5505 default:
5506 abort ();
5509 def_builtin (d->mask, d->name, type, d->code);
5513 static void
5514 altivec_init_builtins ()
5516 struct builtin_description *d;
5517 struct builtin_description_predicates *dp;
5518 size_t i;
5519 tree pfloat_type_node = build_pointer_type (float_type_node);
5520 tree pint_type_node = build_pointer_type (integer_type_node);
5521 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5522 tree pchar_type_node = build_pointer_type (char_type_node);
5524 tree pvoid_type_node = build_pointer_type (void_type_node);
5526 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
5527 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
5528 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
5529 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
5531 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
5533 tree int_ftype_int_v4si_v4si
5534 = build_function_type_list (integer_type_node,
5535 integer_type_node, V4SI_type_node,
5536 V4SI_type_node, NULL_TREE);
5537 tree v4sf_ftype_pcfloat
5538 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
5539 tree void_ftype_pfloat_v4sf
5540 = build_function_type_list (void_type_node,
5541 pfloat_type_node, V4SF_type_node, NULL_TREE);
5542 tree v4si_ftype_pcint
5543 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
5544 tree void_ftype_pint_v4si
5545 = build_function_type_list (void_type_node,
5546 pint_type_node, V4SI_type_node, NULL_TREE);
5547 tree v8hi_ftype_pcshort
5548 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
5549 tree void_ftype_pshort_v8hi
5550 = build_function_type_list (void_type_node,
5551 pshort_type_node, V8HI_type_node, NULL_TREE);
5552 tree v16qi_ftype_pcchar
5553 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
5554 tree void_ftype_pchar_v16qi
5555 = build_function_type_list (void_type_node,
5556 pchar_type_node, V16QI_type_node, NULL_TREE);
5557 tree void_ftype_v4si
5558 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5559 tree v8hi_ftype_void
5560 = build_function_type (V8HI_type_node, void_list_node);
5561 tree void_ftype_void
5562 = build_function_type (void_type_node, void_list_node);
5563 tree void_ftype_qi
5564 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5566 tree v16qi_ftype_int_pcvoid
5567 = build_function_type_list (V16QI_type_node,
5568 integer_type_node, pcvoid_type_node, NULL_TREE);
5569 tree v8hi_ftype_int_pcvoid
5570 = build_function_type_list (V8HI_type_node,
5571 integer_type_node, pcvoid_type_node, NULL_TREE);
5572 tree v4si_ftype_int_pcvoid
5573 = build_function_type_list (V4SI_type_node,
5574 integer_type_node, pcvoid_type_node, NULL_TREE);
5576 tree void_ftype_v4si_int_pvoid
5577 = build_function_type_list (void_type_node,
5578 V4SI_type_node, integer_type_node,
5579 pvoid_type_node, NULL_TREE);
5580 tree void_ftype_v16qi_int_pvoid
5581 = build_function_type_list (void_type_node,
5582 V16QI_type_node, integer_type_node,
5583 pvoid_type_node, NULL_TREE);
5584 tree void_ftype_v8hi_int_pvoid
5585 = build_function_type_list (void_type_node,
5586 V8HI_type_node, integer_type_node,
5587 pvoid_type_node, NULL_TREE);
5588 tree int_ftype_int_v8hi_v8hi
5589 = build_function_type_list (integer_type_node,
5590 integer_type_node, V8HI_type_node,
5591 V8HI_type_node, NULL_TREE);
5592 tree int_ftype_int_v16qi_v16qi
5593 = build_function_type_list (integer_type_node,
5594 integer_type_node, V16QI_type_node,
5595 V16QI_type_node, NULL_TREE);
5596 tree int_ftype_int_v4sf_v4sf
5597 = build_function_type_list (integer_type_node,
5598 integer_type_node, V4SF_type_node,
5599 V4SF_type_node, NULL_TREE);
5600 tree v4si_ftype_v4si
5601 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5602 tree v8hi_ftype_v8hi
5603 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5604 tree v16qi_ftype_v16qi
5605 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5606 tree v4sf_ftype_v4sf
5607 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5608 tree void_ftype_pcvoid_int_char
5609 = build_function_type_list (void_type_node,
5610 pcvoid_type_node, integer_type_node,
5611 char_type_node, NULL_TREE);
5613 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
5614 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5615 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
5616 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5617 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
5618 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5619 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
5620 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5621 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
5622 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5623 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
5624 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5625 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
5626 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5627 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
5628 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5629 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5630 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5631 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5632 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5633 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
5634 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
5635 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
5636 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
5637 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
5638 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
5639 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
5640 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5641 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5642 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5643 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5644 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5646 /* Add the DST variants. */
5647 d = (struct builtin_description *) bdesc_dst;
5648 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5649 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
5651 /* Initialize the predicates. */
5652 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5653 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5655 enum machine_mode mode1;
5656 tree type;
5658 mode1 = insn_data[dp->icode].operand[1].mode;
5660 switch (mode1)
5662 case V4SImode:
5663 type = int_ftype_int_v4si_v4si;
5664 break;
5665 case V8HImode:
5666 type = int_ftype_int_v8hi_v8hi;
5667 break;
5668 case V16QImode:
5669 type = int_ftype_int_v16qi_v16qi;
5670 break;
5671 case V4SFmode:
5672 type = int_ftype_int_v4sf_v4sf;
5673 break;
5674 default:
5675 abort ();
5678 def_builtin (dp->mask, dp->name, type, dp->code);
5681 /* Initialize the abs* operators. */
5682 d = (struct builtin_description *) bdesc_abs;
5683 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5685 enum machine_mode mode0;
5686 tree type;
5688 mode0 = insn_data[d->icode].operand[0].mode;
5690 switch (mode0)
5692 case V4SImode:
5693 type = v4si_ftype_v4si;
5694 break;
5695 case V8HImode:
5696 type = v8hi_ftype_v8hi;
5697 break;
5698 case V16QImode:
5699 type = v16qi_ftype_v16qi;
5700 break;
5701 case V4SFmode:
5702 type = v4sf_ftype_v4sf;
5703 break;
5704 default:
5705 abort ();
5708 def_builtin (d->mask, d->name, type, d->code);
5712 static void
5713 rs6000_common_init_builtins ()
5715 struct builtin_description *d;
5716 size_t i;
5718 tree v4sf_ftype_v4sf_v4sf_v16qi
5719 = build_function_type_list (V4SF_type_node,
5720 V4SF_type_node, V4SF_type_node,
5721 V16QI_type_node, NULL_TREE);
5722 tree v4si_ftype_v4si_v4si_v16qi
5723 = build_function_type_list (V4SI_type_node,
5724 V4SI_type_node, V4SI_type_node,
5725 V16QI_type_node, NULL_TREE);
5726 tree v8hi_ftype_v8hi_v8hi_v16qi
5727 = build_function_type_list (V8HI_type_node,
5728 V8HI_type_node, V8HI_type_node,
5729 V16QI_type_node, NULL_TREE);
5730 tree v16qi_ftype_v16qi_v16qi_v16qi
5731 = build_function_type_list (V16QI_type_node,
5732 V16QI_type_node, V16QI_type_node,
5733 V16QI_type_node, NULL_TREE);
5734 tree v4si_ftype_char
5735 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5736 tree v8hi_ftype_char
5737 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5738 tree v16qi_ftype_char
5739 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5740 tree v8hi_ftype_v16qi
5741 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5742 tree v4sf_ftype_v4sf
5743 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5745 tree v2si_ftype_v2si_v2si
5746 = build_function_type_list (V2SI_type_node,
5747 V2SI_type_node, V2SI_type_node, NULL_TREE);
5749 tree v2sf_ftype_v2sf_v2sf
5750 = build_function_type_list (V2SF_type_node,
5751 V2SF_type_node, V2SF_type_node, NULL_TREE);
5753 tree v2si_ftype_int_int
5754 = build_function_type_list (V2SI_type_node,
5755 integer_type_node, integer_type_node,
5756 NULL_TREE);
5758 tree v2si_ftype_v2si
5759 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5761 tree v2sf_ftype_v2sf
5762 = build_function_type_list (V2SF_type_node,
5763 V2SF_type_node, NULL_TREE);
5765 tree v2sf_ftype_v2si
5766 = build_function_type_list (V2SF_type_node,
5767 V2SI_type_node, NULL_TREE);
5769 tree v2si_ftype_v2sf
5770 = build_function_type_list (V2SI_type_node,
5771 V2SF_type_node, NULL_TREE);
5773 tree v2si_ftype_v2si_char
5774 = build_function_type_list (V2SI_type_node,
5775 V2SI_type_node, char_type_node, NULL_TREE);
5777 tree v2si_ftype_int_char
5778 = build_function_type_list (V2SI_type_node,
5779 integer_type_node, char_type_node, NULL_TREE);
5781 tree v2si_ftype_char
5782 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5784 tree int_ftype_int_int
5785 = build_function_type_list (integer_type_node,
5786 integer_type_node, integer_type_node,
5787 NULL_TREE);
5789 tree v4si_ftype_v4si_v4si
5790 = build_function_type_list (V4SI_type_node,
5791 V4SI_type_node, V4SI_type_node, NULL_TREE);
5792 tree v4sf_ftype_v4si_char
5793 = build_function_type_list (V4SF_type_node,
5794 V4SI_type_node, char_type_node, NULL_TREE);
5795 tree v4si_ftype_v4sf_char
5796 = build_function_type_list (V4SI_type_node,
5797 V4SF_type_node, char_type_node, NULL_TREE);
5798 tree v4si_ftype_v4si_char
5799 = build_function_type_list (V4SI_type_node,
5800 V4SI_type_node, char_type_node, NULL_TREE);
5801 tree v8hi_ftype_v8hi_char
5802 = build_function_type_list (V8HI_type_node,
5803 V8HI_type_node, char_type_node, NULL_TREE);
5804 tree v16qi_ftype_v16qi_char
5805 = build_function_type_list (V16QI_type_node,
5806 V16QI_type_node, char_type_node, NULL_TREE);
5807 tree v16qi_ftype_v16qi_v16qi_char
5808 = build_function_type_list (V16QI_type_node,
5809 V16QI_type_node, V16QI_type_node,
5810 char_type_node, NULL_TREE);
5811 tree v8hi_ftype_v8hi_v8hi_char
5812 = build_function_type_list (V8HI_type_node,
5813 V8HI_type_node, V8HI_type_node,
5814 char_type_node, NULL_TREE);
5815 tree v4si_ftype_v4si_v4si_char
5816 = build_function_type_list (V4SI_type_node,
5817 V4SI_type_node, V4SI_type_node,
5818 char_type_node, NULL_TREE);
5819 tree v4sf_ftype_v4sf_v4sf_char
5820 = build_function_type_list (V4SF_type_node,
5821 V4SF_type_node, V4SF_type_node,
5822 char_type_node, NULL_TREE);
5823 tree v4sf_ftype_v4sf_v4sf
5824 = build_function_type_list (V4SF_type_node,
5825 V4SF_type_node, V4SF_type_node, NULL_TREE);
5826 tree v4sf_ftype_v4sf_v4sf_v4si
5827 = build_function_type_list (V4SF_type_node,
5828 V4SF_type_node, V4SF_type_node,
5829 V4SI_type_node, NULL_TREE);
5830 tree v4sf_ftype_v4sf_v4sf_v4sf
5831 = build_function_type_list (V4SF_type_node,
5832 V4SF_type_node, V4SF_type_node,
5833 V4SF_type_node, NULL_TREE);
5834 tree v4si_ftype_v4si_v4si_v4si
5835 = build_function_type_list (V4SI_type_node,
5836 V4SI_type_node, V4SI_type_node,
5837 V4SI_type_node, NULL_TREE);
5838 tree v8hi_ftype_v8hi_v8hi
5839 = build_function_type_list (V8HI_type_node,
5840 V8HI_type_node, V8HI_type_node, NULL_TREE);
5841 tree v8hi_ftype_v8hi_v8hi_v8hi
5842 = build_function_type_list (V8HI_type_node,
5843 V8HI_type_node, V8HI_type_node,
5844 V8HI_type_node, NULL_TREE);
5845 tree v4si_ftype_v8hi_v8hi_v4si
5846 = build_function_type_list (V4SI_type_node,
5847 V8HI_type_node, V8HI_type_node,
5848 V4SI_type_node, NULL_TREE);
5849 tree v4si_ftype_v16qi_v16qi_v4si
5850 = build_function_type_list (V4SI_type_node,
5851 V16QI_type_node, V16QI_type_node,
5852 V4SI_type_node, NULL_TREE);
5853 tree v16qi_ftype_v16qi_v16qi
5854 = build_function_type_list (V16QI_type_node,
5855 V16QI_type_node, V16QI_type_node, NULL_TREE);
5856 tree v4si_ftype_v4sf_v4sf
5857 = build_function_type_list (V4SI_type_node,
5858 V4SF_type_node, V4SF_type_node, NULL_TREE);
5859 tree v8hi_ftype_v16qi_v16qi
5860 = build_function_type_list (V8HI_type_node,
5861 V16QI_type_node, V16QI_type_node, NULL_TREE);
5862 tree v4si_ftype_v8hi_v8hi
5863 = build_function_type_list (V4SI_type_node,
5864 V8HI_type_node, V8HI_type_node, NULL_TREE);
5865 tree v8hi_ftype_v4si_v4si
5866 = build_function_type_list (V8HI_type_node,
5867 V4SI_type_node, V4SI_type_node, NULL_TREE);
5868 tree v16qi_ftype_v8hi_v8hi
5869 = build_function_type_list (V16QI_type_node,
5870 V8HI_type_node, V8HI_type_node, NULL_TREE);
5871 tree v4si_ftype_v16qi_v4si
5872 = build_function_type_list (V4SI_type_node,
5873 V16QI_type_node, V4SI_type_node, NULL_TREE);
5874 tree v4si_ftype_v16qi_v16qi
5875 = build_function_type_list (V4SI_type_node,
5876 V16QI_type_node, V16QI_type_node, NULL_TREE);
5877 tree v4si_ftype_v8hi_v4si
5878 = build_function_type_list (V4SI_type_node,
5879 V8HI_type_node, V4SI_type_node, NULL_TREE);
5880 tree v4si_ftype_v8hi
5881 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5882 tree int_ftype_v4si_v4si
5883 = build_function_type_list (integer_type_node,
5884 V4SI_type_node, V4SI_type_node, NULL_TREE);
5885 tree int_ftype_v4sf_v4sf
5886 = build_function_type_list (integer_type_node,
5887 V4SF_type_node, V4SF_type_node, NULL_TREE);
5888 tree int_ftype_v16qi_v16qi
5889 = build_function_type_list (integer_type_node,
5890 V16QI_type_node, V16QI_type_node, NULL_TREE);
5891 tree int_ftype_v8hi_v8hi
5892 = build_function_type_list (integer_type_node,
5893 V8HI_type_node, V8HI_type_node, NULL_TREE);
5895 /* Add the simple ternary operators. */
5896 d = (struct builtin_description *) bdesc_3arg;
5897 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5900 enum machine_mode mode0, mode1, mode2, mode3;
5901 tree type;
5903 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5904 continue;
5906 mode0 = insn_data[d->icode].operand[0].mode;
5907 mode1 = insn_data[d->icode].operand[1].mode;
5908 mode2 = insn_data[d->icode].operand[2].mode;
5909 mode3 = insn_data[d->icode].operand[3].mode;
5911 /* When all four are of the same mode. */
5912 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5914 switch (mode0)
5916 case V4SImode:
5917 type = v4si_ftype_v4si_v4si_v4si;
5918 break;
5919 case V4SFmode:
5920 type = v4sf_ftype_v4sf_v4sf_v4sf;
5921 break;
5922 case V8HImode:
5923 type = v8hi_ftype_v8hi_v8hi_v8hi;
5924 break;
5925 case V16QImode:
5926 type = v16qi_ftype_v16qi_v16qi_v16qi;
5927 break;
5928 default:
5929 abort();
5932 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5934 switch (mode0)
5936 case V4SImode:
5937 type = v4si_ftype_v4si_v4si_v16qi;
5938 break;
5939 case V4SFmode:
5940 type = v4sf_ftype_v4sf_v4sf_v16qi;
5941 break;
5942 case V8HImode:
5943 type = v8hi_ftype_v8hi_v8hi_v16qi;
5944 break;
5945 case V16QImode:
5946 type = v16qi_ftype_v16qi_v16qi_v16qi;
5947 break;
5948 default:
5949 abort();
5952 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5953 && mode3 == V4SImode)
5954 type = v4si_ftype_v16qi_v16qi_v4si;
5955 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5956 && mode3 == V4SImode)
5957 type = v4si_ftype_v8hi_v8hi_v4si;
5958 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5959 && mode3 == V4SImode)
5960 type = v4sf_ftype_v4sf_v4sf_v4si;
5962 /* vchar, vchar, vchar, 4 bit literal. */
5963 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5964 && mode3 == QImode)
5965 type = v16qi_ftype_v16qi_v16qi_char;
5967 /* vshort, vshort, vshort, 4 bit literal. */
5968 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5969 && mode3 == QImode)
5970 type = v8hi_ftype_v8hi_v8hi_char;
5972 /* vint, vint, vint, 4 bit literal. */
5973 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5974 && mode3 == QImode)
5975 type = v4si_ftype_v4si_v4si_char;
5977 /* vfloat, vfloat, vfloat, 4 bit literal. */
5978 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5979 && mode3 == QImode)
5980 type = v4sf_ftype_v4sf_v4sf_char;
5982 else
5983 abort ();
5985 def_builtin (d->mask, d->name, type, d->code);
5988 /* Add the simple binary operators. */
5989 d = (struct builtin_description *) bdesc_2arg;
5990 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5992 enum machine_mode mode0, mode1, mode2;
5993 tree type;
5995 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5996 continue;
5998 mode0 = insn_data[d->icode].operand[0].mode;
5999 mode1 = insn_data[d->icode].operand[1].mode;
6000 mode2 = insn_data[d->icode].operand[2].mode;
6002 /* When all three operands are of the same mode. */
6003 if (mode0 == mode1 && mode1 == mode2)
6005 switch (mode0)
6007 case V4SFmode:
6008 type = v4sf_ftype_v4sf_v4sf;
6009 break;
6010 case V4SImode:
6011 type = v4si_ftype_v4si_v4si;
6012 break;
6013 case V16QImode:
6014 type = v16qi_ftype_v16qi_v16qi;
6015 break;
6016 case V8HImode:
6017 type = v8hi_ftype_v8hi_v8hi;
6018 break;
6019 case V2SImode:
6020 type = v2si_ftype_v2si_v2si;
6021 break;
6022 case V2SFmode:
6023 type = v2sf_ftype_v2sf_v2sf;
6024 break;
6025 case SImode:
6026 type = int_ftype_int_int;
6027 break;
6028 default:
6029 abort ();
6033 /* A few other combos we really don't want to do manually. */
6035 /* vint, vfloat, vfloat. */
6036 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6037 type = v4si_ftype_v4sf_v4sf;
6039 /* vshort, vchar, vchar. */
6040 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6041 type = v8hi_ftype_v16qi_v16qi;
6043 /* vint, vshort, vshort. */
6044 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6045 type = v4si_ftype_v8hi_v8hi;
6047 /* vshort, vint, vint. */
6048 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6049 type = v8hi_ftype_v4si_v4si;
6051 /* vchar, vshort, vshort. */
6052 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6053 type = v16qi_ftype_v8hi_v8hi;
6055 /* vint, vchar, vint. */
6056 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6057 type = v4si_ftype_v16qi_v4si;
6059 /* vint, vchar, vchar. */
6060 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6061 type = v4si_ftype_v16qi_v16qi;
6063 /* vint, vshort, vint. */
6064 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6065 type = v4si_ftype_v8hi_v4si;
6067 /* vint, vint, 5 bit literal. */
6068 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6069 type = v4si_ftype_v4si_char;
6071 /* vshort, vshort, 5 bit literal. */
6072 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6073 type = v8hi_ftype_v8hi_char;
6075 /* vchar, vchar, 5 bit literal. */
6076 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6077 type = v16qi_ftype_v16qi_char;
6079 /* vfloat, vint, 5 bit literal. */
6080 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6081 type = v4sf_ftype_v4si_char;
6083 /* vint, vfloat, 5 bit literal. */
6084 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6085 type = v4si_ftype_v4sf_char;
6087 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6088 type = v2si_ftype_int_int;
6090 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6091 type = v2si_ftype_v2si_char;
6093 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6094 type = v2si_ftype_int_char;
6096 /* int, x, x. */
6097 else if (mode0 == SImode)
6099 switch (mode1)
6101 case V4SImode:
6102 type = int_ftype_v4si_v4si;
6103 break;
6104 case V4SFmode:
6105 type = int_ftype_v4sf_v4sf;
6106 break;
6107 case V16QImode:
6108 type = int_ftype_v16qi_v16qi;
6109 break;
6110 case V8HImode:
6111 type = int_ftype_v8hi_v8hi;
6112 break;
6113 default:
6114 abort ();
6118 else
6119 abort ();
6121 def_builtin (d->mask, d->name, type, d->code);
6124 /* Add the simple unary operators. */
6125 d = (struct builtin_description *) bdesc_1arg;
6126 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6128 enum machine_mode mode0, mode1;
6129 tree type;
6131 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6132 continue;
6134 mode0 = insn_data[d->icode].operand[0].mode;
6135 mode1 = insn_data[d->icode].operand[1].mode;
6137 if (mode0 == V4SImode && mode1 == QImode)
6138 type = v4si_ftype_char;
6139 else if (mode0 == V8HImode && mode1 == QImode)
6140 type = v8hi_ftype_char;
6141 else if (mode0 == V16QImode && mode1 == QImode)
6142 type = v16qi_ftype_char;
6143 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6144 type = v4sf_ftype_v4sf;
6145 else if (mode0 == V8HImode && mode1 == V16QImode)
6146 type = v8hi_ftype_v16qi;
6147 else if (mode0 == V4SImode && mode1 == V8HImode)
6148 type = v4si_ftype_v8hi;
6149 else if (mode0 == V2SImode && mode1 == V2SImode)
6150 type = v2si_ftype_v2si;
6151 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6152 type = v2sf_ftype_v2sf;
6153 else if (mode0 == V2SFmode && mode1 == V2SImode)
6154 type = v2sf_ftype_v2si;
6155 else if (mode0 == V2SImode && mode1 == V2SFmode)
6156 type = v2si_ftype_v2sf;
6157 else if (mode0 == V2SImode && mode1 == QImode)
6158 type = v2si_ftype_char;
6159 else
6160 abort ();
6162 def_builtin (d->mask, d->name, type, d->code);
6167 /* Expand a block move operation, and return 1 if successful. Return 0
6168 if we should let the compiler generate normal code.
6170 operands[0] is the destination
6171 operands[1] is the source
6172 operands[2] is the length
6173 operands[3] is the alignment */
6175 #define MAX_MOVE_REG 4
6178 expand_block_move (operands)
6179 rtx operands[];
6181 rtx orig_dest = operands[0];
6182 rtx orig_src = operands[1];
6183 rtx bytes_rtx = operands[2];
6184 rtx align_rtx = operands[3];
6185 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6186 int align;
6187 int bytes;
6188 int offset;
6189 int move_bytes;
6191 /* If this is not a fixed size move, just call memcpy */
6192 if (! constp)
6193 return 0;
6195 /* If this is not a fixed size alignment, abort */
6196 if (GET_CODE (align_rtx) != CONST_INT)
6197 abort ();
6198 align = INTVAL (align_rtx);
6200 /* Anything to move? */
6201 bytes = INTVAL (bytes_rtx);
6202 if (bytes <= 0)
6203 return 1;
6205 /* store_one_arg depends on expand_block_move to handle at least the size of
6206 reg_parm_stack_space. */
6207 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6208 return 0;
6210 if (TARGET_STRING) /* string instructions are available */
6212 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6214 union {
6215 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6216 rtx (*mov) PARAMS ((rtx, rtx));
6217 } gen_func;
6218 enum machine_mode mode = BLKmode;
6219 rtx src, dest;
6221 if (bytes > 24 /* move up to 32 bytes at a time */
6222 && ! fixed_regs[5]
6223 && ! fixed_regs[6]
6224 && ! fixed_regs[7]
6225 && ! fixed_regs[8]
6226 && ! fixed_regs[9]
6227 && ! fixed_regs[10]
6228 && ! fixed_regs[11]
6229 && ! fixed_regs[12])
6231 move_bytes = (bytes > 32) ? 32 : bytes;
6232 gen_func.movstrsi = gen_movstrsi_8reg;
6234 else if (bytes > 16 /* move up to 24 bytes at a time */
6235 && ! fixed_regs[5]
6236 && ! fixed_regs[6]
6237 && ! fixed_regs[7]
6238 && ! fixed_regs[8]
6239 && ! fixed_regs[9]
6240 && ! fixed_regs[10])
6242 move_bytes = (bytes > 24) ? 24 : bytes;
6243 gen_func.movstrsi = gen_movstrsi_6reg;
6245 else if (bytes > 8 /* move up to 16 bytes at a time */
6246 && ! fixed_regs[5]
6247 && ! fixed_regs[6]
6248 && ! fixed_regs[7]
6249 && ! fixed_regs[8])
6251 move_bytes = (bytes > 16) ? 16 : bytes;
6252 gen_func.movstrsi = gen_movstrsi_4reg;
6254 else if (bytes >= 8 && TARGET_POWERPC64
6255 /* 64-bit loads and stores require word-aligned
6256 displacements. */
6257 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6259 move_bytes = 8;
6260 mode = DImode;
6261 gen_func.mov = gen_movdi;
6263 else if (bytes > 4 && !TARGET_POWERPC64)
6264 { /* move up to 8 bytes at a time */
6265 move_bytes = (bytes > 8) ? 8 : bytes;
6266 gen_func.movstrsi = gen_movstrsi_2reg;
6268 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6269 { /* move 4 bytes */
6270 move_bytes = 4;
6271 mode = SImode;
6272 gen_func.mov = gen_movsi;
6274 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6275 { /* move 2 bytes */
6276 move_bytes = 2;
6277 mode = HImode;
6278 gen_func.mov = gen_movhi;
6280 else if (bytes == 1) /* move 1 byte */
6282 move_bytes = 1;
6283 mode = QImode;
6284 gen_func.mov = gen_movqi;
6286 else
6287 { /* move up to 4 bytes at a time */
6288 move_bytes = (bytes > 4) ? 4 : bytes;
6289 gen_func.movstrsi = gen_movstrsi_1reg;
6292 src = adjust_address (orig_src, mode, offset);
6293 dest = adjust_address (orig_dest, mode, offset);
6295 if (mode == BLKmode)
6297 /* Move the address into scratch registers. The movstrsi
6298 patterns require zero offset. */
6299 if (!REG_P (XEXP (src, 0)))
6301 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6302 src = replace_equiv_address (src, src_reg);
6304 set_mem_size (src, GEN_INT (move_bytes));
6306 if (!REG_P (XEXP (dest, 0)))
6308 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6309 dest = replace_equiv_address (dest, dest_reg);
6311 set_mem_size (dest, GEN_INT (move_bytes));
6313 emit_insn ((*gen_func.movstrsi) (dest, src,
6314 GEN_INT (move_bytes & 31),
6315 align_rtx));
6317 else
6319 rtx tmp_reg = gen_reg_rtx (mode);
6321 emit_insn ((*gen_func.mov) (tmp_reg, src));
6322 emit_insn ((*gen_func.mov) (dest, tmp_reg));
6327 else /* string instructions not available */
6329 rtx stores[MAX_MOVE_REG];
6330 int num_reg = 0;
6331 int i;
6333 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6335 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
6336 enum machine_mode mode;
6337 rtx src, dest, tmp_reg;
6339 /* Generate the appropriate load and store, saving the stores
6340 for later. */
6341 if (bytes >= 8 && TARGET_POWERPC64
6342 /* 64-bit loads and stores require word-aligned
6343 displacements. */
6344 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6346 move_bytes = 8;
6347 mode = DImode;
6348 gen_mov_func = gen_movdi;
6350 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6352 move_bytes = 4;
6353 mode = SImode;
6354 gen_mov_func = gen_movsi;
6356 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6358 move_bytes = 2;
6359 mode = HImode;
6360 gen_mov_func = gen_movhi;
6362 else
6364 move_bytes = 1;
6365 mode = QImode;
6366 gen_mov_func = gen_movqi;
6369 src = adjust_address (orig_src, mode, offset);
6370 dest = adjust_address (orig_dest, mode, offset);
6371 tmp_reg = gen_reg_rtx (mode);
6373 emit_insn ((*gen_mov_func) (tmp_reg, src));
6374 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
6376 if (num_reg >= MAX_MOVE_REG)
6378 for (i = 0; i < num_reg; i++)
6379 emit_insn (stores[i]);
6380 num_reg = 0;
6384 for (i = 0; i < num_reg; i++)
6385 emit_insn (stores[i]);
6388 return 1;
6392 /* Return 1 if OP is a load multiple operation. It is known to be a
6393 PARALLEL and the first section will be tested. */
6396 load_multiple_operation (op, mode)
6397 rtx op;
6398 enum machine_mode mode ATTRIBUTE_UNUSED;
6400 int count = XVECLEN (op, 0);
6401 unsigned int dest_regno;
6402 rtx src_addr;
6403 int i;
6405 /* Perform a quick check so we don't blow up below. */
6406 if (count <= 1
6407 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6408 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6409 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6410 return 0;
6412 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6413 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6415 for (i = 1; i < count; i++)
6417 rtx elt = XVECEXP (op, 0, i);
6419 if (GET_CODE (elt) != SET
6420 || GET_CODE (SET_DEST (elt)) != REG
6421 || GET_MODE (SET_DEST (elt)) != SImode
6422 || REGNO (SET_DEST (elt)) != dest_regno + i
6423 || GET_CODE (SET_SRC (elt)) != MEM
6424 || GET_MODE (SET_SRC (elt)) != SImode
6425 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6426 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6427 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6428 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6429 return 0;
6432 return 1;
6435 /* Similar, but tests for store multiple. Here, the second vector element
6436 is a CLOBBER. It will be tested later. */
6439 store_multiple_operation (op, mode)
6440 rtx op;
6441 enum machine_mode mode ATTRIBUTE_UNUSED;
6443 int count = XVECLEN (op, 0) - 1;
6444 unsigned int src_regno;
6445 rtx dest_addr;
6446 int i;
6448 /* Perform a quick check so we don't blow up below. */
6449 if (count <= 1
6450 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6451 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6452 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6453 return 0;
6455 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6456 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6458 for (i = 1; i < count; i++)
6460 rtx elt = XVECEXP (op, 0, i + 1);
6462 if (GET_CODE (elt) != SET
6463 || GET_CODE (SET_SRC (elt)) != REG
6464 || GET_MODE (SET_SRC (elt)) != SImode
6465 || REGNO (SET_SRC (elt)) != src_regno + i
6466 || GET_CODE (SET_DEST (elt)) != MEM
6467 || GET_MODE (SET_DEST (elt)) != SImode
6468 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6469 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6470 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6471 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6472 return 0;
6475 return 1;
6478 /* Return a string to perform a load_multiple operation.
6479 operands[0] is the vector.
6480 operands[1] is the source address.
6481 operands[2] is the first destination register. */
6483 const char *
6484 rs6000_output_load_multiple (operands)
6485 rtx operands[3];
6487 /* We have to handle the case where the pseudo used to contain the address
6488 is assigned to one of the output registers. */
6489 int i, j;
6490 int words = XVECLEN (operands[0], 0);
6491 rtx xop[10];
6493 if (XVECLEN (operands[0], 0) == 1)
6494 return "{l|lwz} %2,0(%1)";
6496 for (i = 0; i < words; i++)
6497 if (refers_to_regno_p (REGNO (operands[2]) + i,
6498 REGNO (operands[2]) + i + 1, operands[1], 0))
6500 if (i == words-1)
6502 xop[0] = GEN_INT (4 * (words-1));
6503 xop[1] = operands[1];
6504 xop[2] = operands[2];
6505 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
6506 return "";
6508 else if (i == 0)
6510 xop[0] = GEN_INT (4 * (words-1));
6511 xop[1] = operands[1];
6512 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6513 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
6514 return "";
6516 else
6518 for (j = 0; j < words; j++)
6519 if (j != i)
6521 xop[0] = GEN_INT (j * 4);
6522 xop[1] = operands[1];
6523 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
6524 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
6526 xop[0] = GEN_INT (i * 4);
6527 xop[1] = operands[1];
6528 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
6529 return "";
6533 return "{lsi|lswi} %2,%1,%N0";
6536 /* Return 1 for a parallel vrsave operation. */
6539 vrsave_operation (op, mode)
6540 rtx op;
6541 enum machine_mode mode ATTRIBUTE_UNUSED;
6543 int count = XVECLEN (op, 0);
6544 unsigned int dest_regno, src_regno;
6545 int i;
6547 if (count <= 1
6548 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6549 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6550 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6551 return 0;
6553 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6554 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6556 if (dest_regno != VRSAVE_REGNO
6557 && src_regno != VRSAVE_REGNO)
6558 return 0;
6560 for (i = 1; i < count; i++)
6562 rtx elt = XVECEXP (op, 0, i);
6564 if (GET_CODE (elt) != CLOBBER
6565 && GET_CODE (elt) != SET)
6566 return 0;
6569 return 1;
6572 /* Return 1 for an PARALLEL suitable for mtcrf. */
6575 mtcrf_operation (op, mode)
6576 rtx op;
6577 enum machine_mode mode ATTRIBUTE_UNUSED;
6579 int count = XVECLEN (op, 0);
6580 int i;
6581 rtx src_reg;
6583 /* Perform a quick check so we don't blow up below. */
6584 if (count < 1
6585 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6586 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6587 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6588 return 0;
6589 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6591 if (GET_CODE (src_reg) != REG
6592 || GET_MODE (src_reg) != SImode
6593 || ! INT_REGNO_P (REGNO (src_reg)))
6594 return 0;
6596 for (i = 0; i < count; i++)
6598 rtx exp = XVECEXP (op, 0, i);
6599 rtx unspec;
6600 int maskval;
6602 if (GET_CODE (exp) != SET
6603 || GET_CODE (SET_DEST (exp)) != REG
6604 || GET_MODE (SET_DEST (exp)) != CCmode
6605 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6606 return 0;
6607 unspec = SET_SRC (exp);
6608 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6610 if (GET_CODE (unspec) != UNSPEC
6611 || XINT (unspec, 1) != 20
6612 || XVECLEN (unspec, 0) != 2
6613 || XVECEXP (unspec, 0, 0) != src_reg
6614 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6615 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6616 return 0;
6618 return 1;
6621 /* Return 1 for an PARALLEL suitable for lmw. */
6624 lmw_operation (op, mode)
6625 rtx op;
6626 enum machine_mode mode ATTRIBUTE_UNUSED;
6628 int count = XVECLEN (op, 0);
6629 unsigned int dest_regno;
6630 rtx src_addr;
6631 unsigned int base_regno;
6632 HOST_WIDE_INT offset;
6633 int i;
6635 /* Perform a quick check so we don't blow up below. */
6636 if (count <= 1
6637 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6638 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6639 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6640 return 0;
6642 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6643 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6645 if (dest_regno > 31
6646 || count != 32 - (int) dest_regno)
6647 return 0;
6649 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6651 offset = 0;
6652 base_regno = REGNO (src_addr);
6653 if (base_regno == 0)
6654 return 0;
6656 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6658 offset = INTVAL (XEXP (src_addr, 1));
6659 base_regno = REGNO (XEXP (src_addr, 0));
6661 else
6662 return 0;
6664 for (i = 0; i < count; i++)
6666 rtx elt = XVECEXP (op, 0, i);
6667 rtx newaddr;
6668 rtx addr_reg;
6669 HOST_WIDE_INT newoffset;
6671 if (GET_CODE (elt) != SET
6672 || GET_CODE (SET_DEST (elt)) != REG
6673 || GET_MODE (SET_DEST (elt)) != SImode
6674 || REGNO (SET_DEST (elt)) != dest_regno + i
6675 || GET_CODE (SET_SRC (elt)) != MEM
6676 || GET_MODE (SET_SRC (elt)) != SImode)
6677 return 0;
6678 newaddr = XEXP (SET_SRC (elt), 0);
6679 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6681 newoffset = 0;
6682 addr_reg = newaddr;
6684 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6686 addr_reg = XEXP (newaddr, 0);
6687 newoffset = INTVAL (XEXP (newaddr, 1));
6689 else
6690 return 0;
6691 if (REGNO (addr_reg) != base_regno
6692 || newoffset != offset + 4 * i)
6693 return 0;
6696 return 1;
6699 /* Return 1 for an PARALLEL suitable for stmw. */
6702 stmw_operation (op, mode)
6703 rtx op;
6704 enum machine_mode mode ATTRIBUTE_UNUSED;
6706 int count = XVECLEN (op, 0);
6707 unsigned int src_regno;
6708 rtx dest_addr;
6709 unsigned int base_regno;
6710 HOST_WIDE_INT offset;
6711 int i;
6713 /* Perform a quick check so we don't blow up below. */
6714 if (count <= 1
6715 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6716 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6717 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6718 return 0;
6720 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6721 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6723 if (src_regno > 31
6724 || count != 32 - (int) src_regno)
6725 return 0;
6727 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6729 offset = 0;
6730 base_regno = REGNO (dest_addr);
6731 if (base_regno == 0)
6732 return 0;
6734 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6736 offset = INTVAL (XEXP (dest_addr, 1));
6737 base_regno = REGNO (XEXP (dest_addr, 0));
6739 else
6740 return 0;
6742 for (i = 0; i < count; i++)
6744 rtx elt = XVECEXP (op, 0, i);
6745 rtx newaddr;
6746 rtx addr_reg;
6747 HOST_WIDE_INT newoffset;
6749 if (GET_CODE (elt) != SET
6750 || GET_CODE (SET_SRC (elt)) != REG
6751 || GET_MODE (SET_SRC (elt)) != SImode
6752 || REGNO (SET_SRC (elt)) != src_regno + i
6753 || GET_CODE (SET_DEST (elt)) != MEM
6754 || GET_MODE (SET_DEST (elt)) != SImode)
6755 return 0;
6756 newaddr = XEXP (SET_DEST (elt), 0);
6757 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6759 newoffset = 0;
6760 addr_reg = newaddr;
6762 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6764 addr_reg = XEXP (newaddr, 0);
6765 newoffset = INTVAL (XEXP (newaddr, 1));
6767 else
6768 return 0;
6769 if (REGNO (addr_reg) != base_regno
6770 || newoffset != offset + 4 * i)
6771 return 0;
6774 return 1;
6777 /* A validation routine: say whether CODE, a condition code, and MODE
6778 match. The other alternatives either don't make sense or should
6779 never be generated. */
6781 static void
6782 validate_condition_mode (code, mode)
6783 enum rtx_code code;
6784 enum machine_mode mode;
6786 if (GET_RTX_CLASS (code) != '<'
6787 || GET_MODE_CLASS (mode) != MODE_CC)
6788 abort ();
6790 /* These don't make sense. */
6791 if ((code == GT || code == LT || code == GE || code == LE)
6792 && mode == CCUNSmode)
6793 abort ();
6795 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6796 && mode != CCUNSmode)
6797 abort ();
6799 if (mode != CCFPmode
6800 && (code == ORDERED || code == UNORDERED
6801 || code == UNEQ || code == LTGT
6802 || code == UNGT || code == UNLT
6803 || code == UNGE || code == UNLE))
6804 abort ();
6806 /* These should never be generated except for
6807 flag_unsafe_math_optimizations and flag_finite_math_only. */
6808 if (mode == CCFPmode
6809 && ! flag_unsafe_math_optimizations
6810 && ! flag_finite_math_only
6811 && (code == LE || code == GE
6812 || code == UNEQ || code == LTGT
6813 || code == UNGT || code == UNLT))
6814 abort ();
6816 /* These are invalid; the information is not there. */
6817 if (mode == CCEQmode
6818 && code != EQ && code != NE)
6819 abort ();
6822 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6823 We only check the opcode against the mode of the CC value here. */
6826 branch_comparison_operator (op, mode)
6827 rtx op;
6828 enum machine_mode mode ATTRIBUTE_UNUSED;
6830 enum rtx_code code = GET_CODE (op);
6831 enum machine_mode cc_mode;
6833 if (GET_RTX_CLASS (code) != '<')
6834 return 0;
6836 cc_mode = GET_MODE (XEXP (op, 0));
6837 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6838 return 0;
6840 validate_condition_mode (code, cc_mode);
6842 return 1;
6845 /* Return 1 if OP is a comparison operation that is valid for a branch
6846 insn and which is true if the corresponding bit in the CC register
6847 is set. */
6850 branch_positive_comparison_operator (op, mode)
6851 rtx op;
6852 enum machine_mode mode;
6854 enum rtx_code code;
6856 if (! branch_comparison_operator (op, mode))
6857 return 0;
6859 code = GET_CODE (op);
6860 return (code == EQ || code == LT || code == GT
6861 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6862 || code == LTU || code == GTU
6863 || code == UNORDERED);
6866 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6867 We check the opcode against the mode of the CC value and disallow EQ or
6868 NE comparisons for integers. */
6871 scc_comparison_operator (op, mode)
6872 rtx op;
6873 enum machine_mode mode;
6875 enum rtx_code code = GET_CODE (op);
6876 enum machine_mode cc_mode;
6878 if (GET_MODE (op) != mode && mode != VOIDmode)
6879 return 0;
6881 if (GET_RTX_CLASS (code) != '<')
6882 return 0;
6884 cc_mode = GET_MODE (XEXP (op, 0));
6885 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6886 return 0;
6888 validate_condition_mode (code, cc_mode);
6890 if (code == NE && cc_mode != CCFPmode)
6891 return 0;
6893 return 1;
6897 trap_comparison_operator (op, mode)
6898 rtx op;
6899 enum machine_mode mode;
6901 if (mode != VOIDmode && mode != GET_MODE (op))
6902 return 0;
6903 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6907 boolean_operator (op, mode)
6908 rtx op;
6909 enum machine_mode mode ATTRIBUTE_UNUSED;
6911 enum rtx_code code = GET_CODE (op);
6912 return (code == AND || code == IOR || code == XOR);
6916 boolean_or_operator (op, mode)
6917 rtx op;
6918 enum machine_mode mode ATTRIBUTE_UNUSED;
6920 enum rtx_code code = GET_CODE (op);
6921 return (code == IOR || code == XOR);
6925 min_max_operator (op, mode)
6926 rtx op;
6927 enum machine_mode mode ATTRIBUTE_UNUSED;
6929 enum rtx_code code = GET_CODE (op);
6930 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6933 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6934 mask required to convert the result of a rotate insn into a shift
6935 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6938 includes_lshift_p (shiftop, andop)
6939 rtx shiftop;
6940 rtx andop;
6942 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6944 shift_mask <<= INTVAL (shiftop);
6946 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6949 /* Similar, but for right shift. */
6952 includes_rshift_p (shiftop, andop)
6953 rtx shiftop;
6954 rtx andop;
6956 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6958 shift_mask >>= INTVAL (shiftop);
6960 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6963 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6964 to perform a left shift. It must have exactly SHIFTOP least
6965 significant 0's, then one or more 1's, then zero or more 0's. */
6968 includes_rldic_lshift_p (shiftop, andop)
6969 rtx shiftop;
6970 rtx andop;
6972 if (GET_CODE (andop) == CONST_INT)
6974 HOST_WIDE_INT c, lsb, shift_mask;
6976 c = INTVAL (andop);
6977 if (c == 0 || c == ~0)
6978 return 0;
6980 shift_mask = ~0;
6981 shift_mask <<= INTVAL (shiftop);
6983 /* Find the least significant one bit. */
6984 lsb = c & -c;
6986 /* It must coincide with the LSB of the shift mask. */
6987 if (-lsb != shift_mask)
6988 return 0;
6990 /* Invert to look for the next transition (if any). */
6991 c = ~c;
6993 /* Remove the low group of ones (originally low group of zeros). */
6994 c &= -lsb;
6996 /* Again find the lsb, and check we have all 1's above. */
6997 lsb = c & -c;
6998 return c == -lsb;
7000 else if (GET_CODE (andop) == CONST_DOUBLE
7001 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7003 HOST_WIDE_INT low, high, lsb;
7004 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7006 low = CONST_DOUBLE_LOW (andop);
7007 if (HOST_BITS_PER_WIDE_INT < 64)
7008 high = CONST_DOUBLE_HIGH (andop);
7010 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7011 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7012 return 0;
7014 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7016 shift_mask_high = ~0;
7017 if (INTVAL (shiftop) > 32)
7018 shift_mask_high <<= INTVAL (shiftop) - 32;
7020 lsb = high & -high;
7022 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7023 return 0;
7025 high = ~high;
7026 high &= -lsb;
7028 lsb = high & -high;
7029 return high == -lsb;
7032 shift_mask_low = ~0;
7033 shift_mask_low <<= INTVAL (shiftop);
7035 lsb = low & -low;
7037 if (-lsb != shift_mask_low)
7038 return 0;
7040 if (HOST_BITS_PER_WIDE_INT < 64)
7041 high = ~high;
7042 low = ~low;
7043 low &= -lsb;
7045 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7047 lsb = high & -high;
7048 return high == -lsb;
7051 lsb = low & -low;
7052 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7054 else
7055 return 0;
7058 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7059 to perform a left shift. It must have SHIFTOP or more least
7060 signifigant 0's, with the remainder of the word 1's. */
7063 includes_rldicr_lshift_p (shiftop, andop)
7064 rtx shiftop;
7065 rtx andop;
7067 if (GET_CODE (andop) == CONST_INT)
7069 HOST_WIDE_INT c, lsb, shift_mask;
7071 shift_mask = ~0;
7072 shift_mask <<= INTVAL (shiftop);
7073 c = INTVAL (andop);
7075 /* Find the least signifigant one bit. */
7076 lsb = c & -c;
7078 /* It must be covered by the shift mask.
7079 This test also rejects c == 0. */
7080 if ((lsb & shift_mask) == 0)
7081 return 0;
7083 /* Check we have all 1's above the transition, and reject all 1's. */
7084 return c == -lsb && lsb != 1;
7086 else if (GET_CODE (andop) == CONST_DOUBLE
7087 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7089 HOST_WIDE_INT low, lsb, shift_mask_low;
7091 low = CONST_DOUBLE_LOW (andop);
7093 if (HOST_BITS_PER_WIDE_INT < 64)
7095 HOST_WIDE_INT high, shift_mask_high;
7097 high = CONST_DOUBLE_HIGH (andop);
7099 if (low == 0)
7101 shift_mask_high = ~0;
7102 if (INTVAL (shiftop) > 32)
7103 shift_mask_high <<= INTVAL (shiftop) - 32;
7105 lsb = high & -high;
7107 if ((lsb & shift_mask_high) == 0)
7108 return 0;
7110 return high == -lsb;
7112 if (high != ~0)
7113 return 0;
7116 shift_mask_low = ~0;
7117 shift_mask_low <<= INTVAL (shiftop);
7119 lsb = low & -low;
7121 if ((lsb & shift_mask_low) == 0)
7122 return 0;
7124 return low == -lsb && lsb != 1;
7126 else
7127 return 0;
7130 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7131 for lfq and stfq insns.
7133 Note reg1 and reg2 *must* be hard registers. To be sure we will
7134 abort if we are passed pseudo registers. */
7137 registers_ok_for_quad_peep (reg1, reg2)
7138 rtx reg1, reg2;
7140 /* We might have been passed a SUBREG. */
7141 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7142 return 0;
7144 return (REGNO (reg1) == REGNO (reg2) - 1);
7147 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7148 addr1 and addr2 must be in consecutive memory locations
7149 (addr2 == addr1 + 8). */
7152 addrs_ok_for_quad_peep (addr1, addr2)
7153 rtx addr1;
7154 rtx addr2;
7156 unsigned int reg1;
7157 int offset1;
7159 /* Extract an offset (if used) from the first addr. */
7160 if (GET_CODE (addr1) == PLUS)
7162 /* If not a REG, return zero. */
7163 if (GET_CODE (XEXP (addr1, 0)) != REG)
7164 return 0;
7165 else
7167 reg1 = REGNO (XEXP (addr1, 0));
7168 /* The offset must be constant! */
7169 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7170 return 0;
7171 offset1 = INTVAL (XEXP (addr1, 1));
7174 else if (GET_CODE (addr1) != REG)
7175 return 0;
7176 else
7178 reg1 = REGNO (addr1);
7179 /* This was a simple (mem (reg)) expression. Offset is 0. */
7180 offset1 = 0;
7183 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7184 if (GET_CODE (addr2) != PLUS)
7185 return 0;
7187 if (GET_CODE (XEXP (addr2, 0)) != REG
7188 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7189 return 0;
7191 if (reg1 != REGNO (XEXP (addr2, 0)))
7192 return 0;
7194 /* The offset for the second addr must be 8 more than the first addr. */
7195 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7196 return 0;
7198 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7199 instructions. */
7200 return 1;
7203 /* Return the register class of a scratch register needed to copy IN into
7204 or out of a register in CLASS in MODE. If it can be done directly,
7205 NO_REGS is returned. */
7207 enum reg_class
7208 secondary_reload_class (class, mode, in)
7209 enum reg_class class;
7210 enum machine_mode mode ATTRIBUTE_UNUSED;
7211 rtx in;
7213 int regno;
7215 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
7217 /* We cannot copy a symbolic operand directly into anything
7218 other than BASE_REGS for TARGET_ELF. So indicate that a
7219 register from BASE_REGS is needed as an intermediate
7220 register.
7222 On Darwin, pic addresses require a load from memory, which
7223 needs a base register. */
7224 if (class != BASE_REGS
7225 && (GET_CODE (in) == SYMBOL_REF
7226 || GET_CODE (in) == HIGH
7227 || GET_CODE (in) == LABEL_REF
7228 || GET_CODE (in) == CONST))
7229 return BASE_REGS;
7232 if (GET_CODE (in) == REG)
7234 regno = REGNO (in);
7235 if (regno >= FIRST_PSEUDO_REGISTER)
7237 regno = true_regnum (in);
7238 if (regno >= FIRST_PSEUDO_REGISTER)
7239 regno = -1;
7242 else if (GET_CODE (in) == SUBREG)
7244 regno = true_regnum (in);
7245 if (regno >= FIRST_PSEUDO_REGISTER)
7246 regno = -1;
7248 else
7249 regno = -1;
7251 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7252 into anything. */
7253 if (class == GENERAL_REGS || class == BASE_REGS
7254 || (regno >= 0 && INT_REGNO_P (regno)))
7255 return NO_REGS;
7257 /* Constants, memory, and FP registers can go into FP registers. */
7258 if ((regno == -1 || FP_REGNO_P (regno))
7259 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7260 return NO_REGS;
7262 /* Memory, and AltiVec registers can go into AltiVec registers. */
7263 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7264 && class == ALTIVEC_REGS)
7265 return NO_REGS;
7267 /* We can copy among the CR registers. */
7268 if ((class == CR_REGS || class == CR0_REGS)
7269 && regno >= 0 && CR_REGNO_P (regno))
7270 return NO_REGS;
7272 /* Otherwise, we need GENERAL_REGS. */
7273 return GENERAL_REGS;
7276 /* Given a comparison operation, return the bit number in CCR to test. We
7277 know this is a valid comparison.
7279 SCC_P is 1 if this is for an scc. That means that %D will have been
7280 used instead of %C, so the bits will be in different places.
7282 Return -1 if OP isn't a valid comparison for some reason. */
7285 ccr_bit (op, scc_p)
7286 rtx op;
7287 int scc_p;
7289 enum rtx_code code = GET_CODE (op);
7290 enum machine_mode cc_mode;
7291 int cc_regnum;
7292 int base_bit;
7293 rtx reg;
7295 if (GET_RTX_CLASS (code) != '<')
7296 return -1;
7298 reg = XEXP (op, 0);
7300 if (GET_CODE (reg) != REG
7301 || ! CR_REGNO_P (REGNO (reg)))
7302 abort ();
7304 cc_mode = GET_MODE (reg);
7305 cc_regnum = REGNO (reg);
7306 base_bit = 4 * (cc_regnum - CR0_REGNO);
7308 validate_condition_mode (code, cc_mode);
7310 switch (code)
7312 case NE:
7313 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7314 return base_bit + 1;
7315 return scc_p ? base_bit + 3 : base_bit + 2;
7316 case EQ:
7317 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7318 return base_bit + 1;
7319 return base_bit + 2;
7320 case GT: case GTU: case UNLE:
7321 return base_bit + 1;
7322 case LT: case LTU: case UNGE:
7323 return base_bit;
7324 case ORDERED: case UNORDERED:
7325 return base_bit + 3;
7327 case GE: case GEU:
7328 /* If scc, we will have done a cror to put the bit in the
7329 unordered position. So test that bit. For integer, this is ! LT
7330 unless this is an scc insn. */
7331 return scc_p ? base_bit + 3 : base_bit;
7333 case LE: case LEU:
7334 return scc_p ? base_bit + 3 : base_bit + 1;
7336 default:
7337 abort ();
7341 /* Return the GOT register. */
7343 struct rtx_def *
7344 rs6000_got_register (value)
7345 rtx value ATTRIBUTE_UNUSED;
7347 /* The second flow pass currently (June 1999) can't update
7348 regs_ever_live without disturbing other parts of the compiler, so
7349 update it here to make the prolog/epilogue code happy. */
7350 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7351 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7353 current_function_uses_pic_offset_table = 1;
7355 return pic_offset_table_rtx;
7358 /* Function to init struct machine_function.
7359 This will be called, via a pointer variable,
7360 from push_function_context. */
7362 static struct machine_function *
7363 rs6000_init_machine_status ()
7365 return ggc_alloc_cleared (sizeof (machine_function));
7368 /* These macros test for integers and extract the low-order bits. */
7369 #define INT_P(X) \
7370 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7371 && GET_MODE (X) == VOIDmode)
7373 #define INT_LOWPART(X) \
7374 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7377 extract_MB (op)
7378 rtx op;
7380 int i;
7381 unsigned long val = INT_LOWPART (op);
7383 /* If the high bit is zero, the value is the first 1 bit we find
7384 from the left. */
7385 if ((val & 0x80000000) == 0)
7387 if ((val & 0xffffffff) == 0)
7388 abort ();
7390 i = 1;
7391 while (((val <<= 1) & 0x80000000) == 0)
7392 ++i;
7393 return i;
7396 /* If the high bit is set and the low bit is not, or the mask is all
7397 1's, the value is zero. */
7398 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7399 return 0;
7401 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7402 from the right. */
7403 i = 31;
7404 while (((val >>= 1) & 1) != 0)
7405 --i;
7407 return i;
7411 extract_ME (op)
7412 rtx op;
7414 int i;
7415 unsigned long val = INT_LOWPART (op);
7417 /* If the low bit is zero, the value is the first 1 bit we find from
7418 the right. */
7419 if ((val & 1) == 0)
7421 if ((val & 0xffffffff) == 0)
7422 abort ();
7424 i = 30;
7425 while (((val >>= 1) & 1) == 0)
7426 --i;
7428 return i;
7431 /* If the low bit is set and the high bit is not, or the mask is all
7432 1's, the value is 31. */
7433 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7434 return 31;
7436 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7437 from the left. */
7438 i = 0;
7439 while (((val <<= 1) & 0x80000000) != 0)
7440 ++i;
7442 return i;
7445 /* Print an operand. Recognize special options, documented below. */
7447 #if TARGET_ELF
7448 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7449 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7450 #else
7451 #define SMALL_DATA_RELOC "sda21"
7452 #define SMALL_DATA_REG 0
7453 #endif
7455 void
7456 print_operand (file, x, code)
7457 FILE *file;
7458 rtx x;
7459 int code;
7461 int i;
7462 HOST_WIDE_INT val;
7463 unsigned HOST_WIDE_INT uval;
7465 switch (code)
7467 case '.':
7468 /* Write out an instruction after the call which may be replaced
7469 with glue code by the loader. This depends on the AIX version. */
7470 asm_fprintf (file, RS6000_CALL_GLUE);
7471 return;
7473 /* %a is output_address. */
7475 case 'A':
7476 /* If X is a constant integer whose low-order 5 bits are zero,
7477 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7478 in the AIX assembler where "sri" with a zero shift count
7479 writes a trash instruction. */
7480 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7481 putc ('l', file);
7482 else
7483 putc ('r', file);
7484 return;
7486 case 'b':
7487 /* If constant, low-order 16 bits of constant, unsigned.
7488 Otherwise, write normally. */
7489 if (INT_P (x))
7490 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7491 else
7492 print_operand (file, x, 0);
7493 return;
7495 case 'B':
7496 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7497 for 64-bit mask direction. */
7498 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7499 return;
7501 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7502 output_operand. */
7504 case 'D':
7505 /* There used to be a comment for 'C' reading "This is an
7506 optional cror needed for certain floating-point
7507 comparisons. Otherwise write nothing." */
7509 /* Similar, except that this is for an scc, so we must be able to
7510 encode the test in a single bit that is one. We do the above
7511 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7512 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7513 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7515 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7517 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7518 base_bit + 2,
7519 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7522 else if (GET_CODE (x) == NE)
7524 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7526 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7527 base_bit + 2, base_bit + 2);
7529 else if (TARGET_SPE && TARGET_HARD_FLOAT
7530 && GET_CODE (x) == EQ
7531 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7533 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7535 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7536 base_bit + 1, base_bit + 1);
7538 return;
7540 case 'E':
7541 /* X is a CR register. Print the number of the EQ bit of the CR */
7542 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7543 output_operand_lossage ("invalid %%E value");
7544 else
7545 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7546 return;
7548 case 'f':
7549 /* X is a CR register. Print the shift count needed to move it
7550 to the high-order four bits. */
7551 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7552 output_operand_lossage ("invalid %%f value");
7553 else
7554 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7555 return;
7557 case 'F':
7558 /* Similar, but print the count for the rotate in the opposite
7559 direction. */
7560 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7561 output_operand_lossage ("invalid %%F value");
7562 else
7563 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7564 return;
7566 case 'G':
7567 /* X is a constant integer. If it is negative, print "m",
7568 otherwise print "z". This is to make an aze or ame insn. */
7569 if (GET_CODE (x) != CONST_INT)
7570 output_operand_lossage ("invalid %%G value");
7571 else if (INTVAL (x) >= 0)
7572 putc ('z', file);
7573 else
7574 putc ('m', file);
7575 return;
7577 case 'h':
7578 /* If constant, output low-order five bits. Otherwise, write
7579 normally. */
7580 if (INT_P (x))
7581 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7582 else
7583 print_operand (file, x, 0);
7584 return;
7586 case 'H':
7587 /* If constant, output low-order six bits. Otherwise, write
7588 normally. */
7589 if (INT_P (x))
7590 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7591 else
7592 print_operand (file, x, 0);
7593 return;
7595 case 'I':
7596 /* Print `i' if this is a constant, else nothing. */
7597 if (INT_P (x))
7598 putc ('i', file);
7599 return;
7601 case 'j':
7602 /* Write the bit number in CCR for jump. */
7603 i = ccr_bit (x, 0);
7604 if (i == -1)
7605 output_operand_lossage ("invalid %%j code");
7606 else
7607 fprintf (file, "%d", i);
7608 return;
7610 case 'J':
7611 /* Similar, but add one for shift count in rlinm for scc and pass
7612 scc flag to `ccr_bit'. */
7613 i = ccr_bit (x, 1);
7614 if (i == -1)
7615 output_operand_lossage ("invalid %%J code");
7616 else
7617 /* If we want bit 31, write a shift count of zero, not 32. */
7618 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7619 return;
7621 case 'k':
7622 /* X must be a constant. Write the 1's complement of the
7623 constant. */
7624 if (! INT_P (x))
7625 output_operand_lossage ("invalid %%k value");
7626 else
7627 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7628 return;
7630 case 'K':
7631 /* X must be a symbolic constant on ELF. Write an
7632 expression suitable for an 'addi' that adds in the low 16
7633 bits of the MEM. */
7634 if (GET_CODE (x) != CONST)
7636 print_operand_address (file, x);
7637 fputs ("@l", file);
7639 else
7641 if (GET_CODE (XEXP (x, 0)) != PLUS
7642 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7643 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7644 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7645 output_operand_lossage ("invalid %%K value");
7646 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7647 fputs ("@l", file);
7648 /* For GNU as, there must be a non-alphanumeric character
7649 between 'l' and the number. The '-' is added by
7650 print_operand() already. */
7651 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7652 fputs ("+", file);
7653 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7655 return;
7657 /* %l is output_asm_label. */
7659 case 'L':
7660 /* Write second word of DImode or DFmode reference. Works on register
7661 or non-indexed memory only. */
7662 if (GET_CODE (x) == REG)
7663 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7664 else if (GET_CODE (x) == MEM)
7666 /* Handle possible auto-increment. Since it is pre-increment and
7667 we have already done it, we can just use an offset of word. */
7668 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7669 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7670 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7671 UNITS_PER_WORD));
7672 else
7673 output_address (XEXP (adjust_address_nv (x, SImode,
7674 UNITS_PER_WORD),
7675 0));
7677 if (small_data_operand (x, GET_MODE (x)))
7678 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7679 reg_names[SMALL_DATA_REG]);
7681 return;
7683 case 'm':
7684 /* MB value for a mask operand. */
7685 if (! mask_operand (x, SImode))
7686 output_operand_lossage ("invalid %%m value");
7688 fprintf (file, "%d", extract_MB (x));
7689 return;
7691 case 'M':
7692 /* ME value for a mask operand. */
7693 if (! mask_operand (x, SImode))
7694 output_operand_lossage ("invalid %%M value");
7696 fprintf (file, "%d", extract_ME (x));
7697 return;
7699 /* %n outputs the negative of its operand. */
7701 case 'N':
7702 /* Write the number of elements in the vector times 4. */
7703 if (GET_CODE (x) != PARALLEL)
7704 output_operand_lossage ("invalid %%N value");
7705 else
7706 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7707 return;
7709 case 'O':
7710 /* Similar, but subtract 1 first. */
7711 if (GET_CODE (x) != PARALLEL)
7712 output_operand_lossage ("invalid %%O value");
7713 else
7714 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7715 return;
7717 case 'p':
7718 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7719 if (! INT_P (x)
7720 || INT_LOWPART (x) < 0
7721 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7722 output_operand_lossage ("invalid %%p value");
7723 else
7724 fprintf (file, "%d", i);
7725 return;
7727 case 'P':
7728 /* The operand must be an indirect memory reference. The result
7729 is the register number. */
7730 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7731 || REGNO (XEXP (x, 0)) >= 32)
7732 output_operand_lossage ("invalid %%P value");
7733 else
7734 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7735 return;
7737 case 'q':
7738 /* This outputs the logical code corresponding to a boolean
7739 expression. The expression may have one or both operands
7740 negated (if one, only the first one). For condition register
7741 logical operations, it will also treat the negated
7742 CR codes as NOTs, but not handle NOTs of them. */
7744 const char *const *t = 0;
7745 const char *s;
7746 enum rtx_code code = GET_CODE (x);
7747 static const char * const tbl[3][3] = {
7748 { "and", "andc", "nor" },
7749 { "or", "orc", "nand" },
7750 { "xor", "eqv", "xor" } };
7752 if (code == AND)
7753 t = tbl[0];
7754 else if (code == IOR)
7755 t = tbl[1];
7756 else if (code == XOR)
7757 t = tbl[2];
7758 else
7759 output_operand_lossage ("invalid %%q value");
7761 if (GET_CODE (XEXP (x, 0)) != NOT)
7762 s = t[0];
7763 else
7765 if (GET_CODE (XEXP (x, 1)) == NOT)
7766 s = t[2];
7767 else
7768 s = t[1];
7771 fputs (s, file);
7773 return;
7775 case 'R':
7776 /* X is a CR register. Print the mask for `mtcrf'. */
7777 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7778 output_operand_lossage ("invalid %%R value");
7779 else
7780 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7781 return;
7783 case 's':
7784 /* Low 5 bits of 32 - value */
7785 if (! INT_P (x))
7786 output_operand_lossage ("invalid %%s value");
7787 else
7788 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7789 return;
7791 case 'S':
7792 /* PowerPC64 mask position. All 0's is excluded.
7793 CONST_INT 32-bit mask is considered sign-extended so any
7794 transition must occur within the CONST_INT, not on the boundary. */
7795 if (! mask64_operand (x, DImode))
7796 output_operand_lossage ("invalid %%S value");
7798 uval = INT_LOWPART (x);
7800 if (uval & 1) /* Clear Left */
7802 #if HOST_BITS_PER_WIDE_INT > 64
7803 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
7804 #endif
7805 i = 64;
7807 else /* Clear Right */
7809 uval = ~uval;
7810 #if HOST_BITS_PER_WIDE_INT > 64
7811 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
7812 #endif
7813 i = 63;
7815 while (uval != 0)
7816 --i, uval >>= 1;
7817 if (i < 0)
7818 abort ();
7819 fprintf (file, "%d", i);
7820 return;
7822 case 't':
7823 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7824 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7825 abort ();
7827 /* Bit 3 is OV bit. */
7828 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7830 /* If we want bit 31, write a shift count of zero, not 32. */
7831 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7832 return;
7834 case 'T':
7835 /* Print the symbolic name of a branch target register. */
7836 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7837 && REGNO (x) != COUNT_REGISTER_REGNUM))
7838 output_operand_lossage ("invalid %%T value");
7839 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7840 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7841 else
7842 fputs ("ctr", file);
7843 return;
7845 case 'u':
7846 /* High-order 16 bits of constant for use in unsigned operand. */
7847 if (! INT_P (x))
7848 output_operand_lossage ("invalid %%u value");
7849 else
7850 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7851 (INT_LOWPART (x) >> 16) & 0xffff);
7852 return;
7854 case 'v':
7855 /* High-order 16 bits of constant for use in signed operand. */
7856 if (! INT_P (x))
7857 output_operand_lossage ("invalid %%v value");
7858 else
7859 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7860 (INT_LOWPART (x) >> 16) & 0xffff);
7861 return;
7863 case 'U':
7864 /* Print `u' if this has an auto-increment or auto-decrement. */
7865 if (GET_CODE (x) == MEM
7866 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7867 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7868 putc ('u', file);
7869 return;
7871 case 'V':
7872 /* Print the trap code for this operand. */
7873 switch (GET_CODE (x))
7875 case EQ:
7876 fputs ("eq", file); /* 4 */
7877 break;
7878 case NE:
7879 fputs ("ne", file); /* 24 */
7880 break;
7881 case LT:
7882 fputs ("lt", file); /* 16 */
7883 break;
7884 case LE:
7885 fputs ("le", file); /* 20 */
7886 break;
7887 case GT:
7888 fputs ("gt", file); /* 8 */
7889 break;
7890 case GE:
7891 fputs ("ge", file); /* 12 */
7892 break;
7893 case LTU:
7894 fputs ("llt", file); /* 2 */
7895 break;
7896 case LEU:
7897 fputs ("lle", file); /* 6 */
7898 break;
7899 case GTU:
7900 fputs ("lgt", file); /* 1 */
7901 break;
7902 case GEU:
7903 fputs ("lge", file); /* 5 */
7904 break;
7905 default:
7906 abort ();
7908 break;
7910 case 'w':
7911 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7912 normally. */
7913 if (INT_P (x))
7914 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7915 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7916 else
7917 print_operand (file, x, 0);
7918 return;
7920 case 'W':
7921 /* MB value for a PowerPC64 rldic operand. */
7922 val = (GET_CODE (x) == CONST_INT
7923 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7925 if (val < 0)
7926 i = -1;
7927 else
7928 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7929 if ((val <<= 1) < 0)
7930 break;
7932 #if HOST_BITS_PER_WIDE_INT == 32
7933 if (GET_CODE (x) == CONST_INT && i >= 0)
7934 i += 32; /* zero-extend high-part was all 0's */
7935 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7937 val = CONST_DOUBLE_LOW (x);
7939 if (val == 0)
7940 abort ();
7941 else if (val < 0)
7942 --i;
7943 else
7944 for ( ; i < 64; i++)
7945 if ((val <<= 1) < 0)
7946 break;
7948 #endif
7950 fprintf (file, "%d", i + 1);
7951 return;
7953 case 'X':
7954 if (GET_CODE (x) == MEM
7955 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7956 putc ('x', file);
7957 return;
7959 case 'Y':
7960 /* Like 'L', for third word of TImode */
7961 if (GET_CODE (x) == REG)
7962 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7963 else if (GET_CODE (x) == MEM)
7965 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7966 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7967 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7968 else
7969 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7970 if (small_data_operand (x, GET_MODE (x)))
7971 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7972 reg_names[SMALL_DATA_REG]);
7974 return;
7976 case 'z':
7977 /* X is a SYMBOL_REF. Write out the name preceded by a
7978 period and without any trailing data in brackets. Used for function
7979 names. If we are configured for System V (or the embedded ABI) on
7980 the PowerPC, do not emit the period, since those systems do not use
7981 TOCs and the like. */
7982 if (GET_CODE (x) != SYMBOL_REF)
7983 abort ();
7985 if (XSTR (x, 0)[0] != '.')
7987 switch (DEFAULT_ABI)
7989 default:
7990 abort ();
7992 case ABI_AIX:
7993 putc ('.', file);
7994 break;
7996 case ABI_V4:
7997 case ABI_AIX_NODESC:
7998 case ABI_DARWIN:
7999 break;
8002 #if TARGET_AIX
8003 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8004 #else
8005 assemble_name (file, XSTR (x, 0));
8006 #endif
8007 return;
8009 case 'Z':
8010 /* Like 'L', for last word of TImode. */
8011 if (GET_CODE (x) == REG)
8012 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8013 else if (GET_CODE (x) == MEM)
8015 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8016 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8017 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8018 else
8019 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8020 if (small_data_operand (x, GET_MODE (x)))
8021 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8022 reg_names[SMALL_DATA_REG]);
8024 return;
8026 /* Print AltiVec or SPE memory operand. */
8027 case 'y':
8029 rtx tmp;
8031 if (GET_CODE (x) != MEM)
8032 abort ();
8034 tmp = XEXP (x, 0);
8036 if (TARGET_SPE)
8038 /* Handle [reg]. */
8039 if (GET_CODE (tmp) == REG)
8041 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8042 break;
8044 /* Handle [reg+UIMM]. */
8045 else if (GET_CODE (tmp) == PLUS &&
8046 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8048 int x;
8050 if (GET_CODE (XEXP (tmp, 0)) != REG)
8051 abort ();
8053 x = INTVAL (XEXP (tmp, 1));
8054 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8055 break;
8058 /* Fall through. Must be [reg+reg]. */
8060 if (GET_CODE (tmp) == REG)
8061 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8062 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8064 if (REGNO (XEXP (tmp, 0)) == 0)
8065 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8066 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8067 else
8068 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8069 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8071 else
8072 abort ();
8073 break;
8076 case 0:
8077 if (GET_CODE (x) == REG)
8078 fprintf (file, "%s", reg_names[REGNO (x)]);
8079 else if (GET_CODE (x) == MEM)
8081 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8082 know the width from the mode. */
8083 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8084 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8085 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8086 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8087 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8088 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8089 else
8090 output_address (XEXP (x, 0));
8092 else
8093 output_addr_const (file, x);
8094 return;
8096 default:
8097 output_operand_lossage ("invalid %%xn code");
8101 /* Print the address of an operand. */
8103 void
8104 print_operand_address (file, x)
8105 FILE *file;
8106 rtx x;
8108 if (GET_CODE (x) == REG)
8109 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8110 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8111 || GET_CODE (x) == LABEL_REF)
8113 output_addr_const (file, x);
8114 if (small_data_operand (x, GET_MODE (x)))
8115 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8116 reg_names[SMALL_DATA_REG]);
8117 else if (TARGET_TOC)
8118 abort ();
8120 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8122 if (REGNO (XEXP (x, 0)) == 0)
8123 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8124 reg_names[ REGNO (XEXP (x, 0)) ]);
8125 else
8126 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8127 reg_names[ REGNO (XEXP (x, 1)) ]);
8129 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8131 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
8132 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8134 #if TARGET_ELF
8135 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8136 && CONSTANT_P (XEXP (x, 1)))
8138 output_addr_const (file, XEXP (x, 1));
8139 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8141 #endif
8142 #if TARGET_MACHO
8143 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8144 && CONSTANT_P (XEXP (x, 1)))
8146 fprintf (file, "lo16(");
8147 output_addr_const (file, XEXP (x, 1));
8148 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8150 #endif
8151 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
8153 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8155 rtx contains_minus = XEXP (x, 1);
8156 rtx minus, symref;
8157 const char *name;
8159 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8160 turn it into (sym) for output_addr_const. */
8161 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8162 contains_minus = XEXP (contains_minus, 0);
8164 minus = XEXP (contains_minus, 0);
8165 symref = XEXP (minus, 0);
8166 XEXP (contains_minus, 0) = symref;
8167 if (TARGET_ELF)
8169 char *newname;
8171 name = XSTR (symref, 0);
8172 newname = alloca (strlen (name) + sizeof ("@toc"));
8173 strcpy (newname, name);
8174 strcat (newname, "@toc");
8175 XSTR (symref, 0) = newname;
8177 output_addr_const (file, XEXP (x, 1));
8178 if (TARGET_ELF)
8179 XSTR (symref, 0) = name;
8180 XEXP (contains_minus, 0) = minus;
8182 else
8183 output_addr_const (file, XEXP (x, 1));
8185 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8187 else
8188 abort ();
8191 /* Target hook for assembling integer objects. The PowerPC version has
8192 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8193 is defined. It also needs to handle DI-mode objects on 64-bit
8194 targets. */
8196 static bool
8197 rs6000_assemble_integer (x, size, aligned_p)
8198 rtx x;
8199 unsigned int size;
8200 int aligned_p;
8202 #ifdef RELOCATABLE_NEEDS_FIXUP
8203 /* Special handling for SI values. */
8204 if (size == 4 && aligned_p)
8206 extern int in_toc_section PARAMS ((void));
8207 static int recurse = 0;
8209 /* For -mrelocatable, we mark all addresses that need to be fixed up
8210 in the .fixup section. */
8211 if (TARGET_RELOCATABLE
8212 && !in_toc_section ()
8213 && !in_text_section ()
8214 && !recurse
8215 && GET_CODE (x) != CONST_INT
8216 && GET_CODE (x) != CONST_DOUBLE
8217 && CONSTANT_P (x))
8219 char buf[256];
8221 recurse = 1;
8222 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8223 fixuplabelno++;
8224 ASM_OUTPUT_LABEL (asm_out_file, buf);
8225 fprintf (asm_out_file, "\t.long\t(");
8226 output_addr_const (asm_out_file, x);
8227 fprintf (asm_out_file, ")@fixup\n");
8228 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8229 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8230 fprintf (asm_out_file, "\t.long\t");
8231 assemble_name (asm_out_file, buf);
8232 fprintf (asm_out_file, "\n\t.previous\n");
8233 recurse = 0;
8234 return true;
8236 /* Remove initial .'s to turn a -mcall-aixdesc function
8237 address into the address of the descriptor, not the function
8238 itself. */
8239 else if (GET_CODE (x) == SYMBOL_REF
8240 && XSTR (x, 0)[0] == '.'
8241 && DEFAULT_ABI == ABI_AIX)
8243 const char *name = XSTR (x, 0);
8244 while (*name == '.')
8245 name++;
8247 fprintf (asm_out_file, "\t.long\t%s\n", name);
8248 return true;
8251 #endif /* RELOCATABLE_NEEDS_FIXUP */
8252 return default_assemble_integer (x, size, aligned_p);
8255 #ifdef HAVE_GAS_HIDDEN
8256 /* Emit an assembler directive to set symbol visibility for DECL to
8257 VISIBILITY_TYPE. */
8259 static void
8260 rs6000_assemble_visibility (decl, vis)
8261 tree decl;
8262 int vis;
8264 /* Functions need to have their entry point symbol visibility set as
8265 well as their descriptor symbol visibility. */
8266 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8268 static const char * const visibility_types[] = {
8269 NULL, "internal", "hidden", "protected"
8272 const char *name, *type;
8274 name = ((* targetm.strip_name_encoding)
8275 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8276 type = visibility_types[vis];
8278 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8279 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8281 else
8282 default_assemble_visibility (decl, vis);
8284 #endif
8286 enum rtx_code
8287 rs6000_reverse_condition (mode, code)
8288 enum machine_mode mode;
8289 enum rtx_code code;
8291 /* Reversal of FP compares takes care -- an ordered compare
8292 becomes an unordered compare and vice versa. */
8293 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8294 return reverse_condition_maybe_unordered (code);
8295 else
8296 return reverse_condition (code);
8299 /* Generate a compare for CODE. Return a brand-new rtx that
8300 represents the result of the compare. */
8302 static rtx
8303 rs6000_generate_compare (code)
8304 enum rtx_code code;
8306 enum machine_mode comp_mode;
8307 rtx compare_result;
8309 if (rs6000_compare_fp_p)
8310 comp_mode = CCFPmode;
8311 else if (code == GTU || code == LTU
8312 || code == GEU || code == LEU)
8313 comp_mode = CCUNSmode;
8314 else
8315 comp_mode = CCmode;
8317 /* First, the compare. */
8318 compare_result = gen_reg_rtx (comp_mode);
8320 /* SPE FP compare instructions on the GPRs. Yuck! */
8321 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8323 rtx cmp, or1, or2, or_result, compare_result2;
8325 switch (code)
8327 case EQ:
8328 case UNEQ:
8329 case NE:
8330 case LTGT:
8331 cmp = flag_unsafe_math_optimizations
8332 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8333 rs6000_compare_op1)
8334 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8335 rs6000_compare_op1);
8336 break;
8337 case GT:
8338 case GTU:
8339 case UNGT:
8340 case UNGE:
8341 case GE:
8342 case GEU:
8343 cmp = flag_unsafe_math_optimizations
8344 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8345 rs6000_compare_op1)
8346 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8347 rs6000_compare_op1);
8348 break;
8349 case LT:
8350 case LTU:
8351 case UNLT:
8352 case UNLE:
8353 case LE:
8354 case LEU:
8355 cmp = flag_unsafe_math_optimizations
8356 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8357 rs6000_compare_op1)
8358 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8359 rs6000_compare_op1);
8360 break;
8361 default:
8362 abort ();
8365 /* Synthesize LE and GE from LT/GT || EQ. */
8366 if (code == LE || code == GE || code == LEU || code == GEU)
8368 /* Synthesize GE/LE frome GT/LT || EQ. */
8370 emit_insn (cmp);
8372 switch (code)
8374 case LE: code = LT; break;
8375 case GE: code = GT; break;
8376 case LEU: code = LT; break;
8377 case GEU: code = GT; break;
8378 default: abort ();
8381 or1 = gen_reg_rtx (SImode);
8382 or2 = gen_reg_rtx (SImode);
8383 or_result = gen_reg_rtx (CCEQmode);
8384 compare_result2 = gen_reg_rtx (CCFPmode);
8386 /* Do the EQ. */
8387 cmp = flag_unsafe_math_optimizations
8388 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8389 rs6000_compare_op1)
8390 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8391 rs6000_compare_op1);
8392 emit_insn (cmp);
8394 /* The MC8540 FP compare instructions set the CR bits
8395 differently than other PPC compare instructions. For
8396 that matter, there is no generic test instruction, but a
8397 testgt, testlt, and testeq. For a true condition, bit 2
8398 is set (x1xx) in the CR. Following the traditional CR
8399 values:
8401 LT GT EQ OV
8402 bit3 bit2 bit1 bit0
8404 ... bit 2 would be a GT CR alias, so later on we
8405 look in the GT bits for the branch instructions.
8406 However, we must be careful to emit correct RTL in
8407 the meantime, so optimizations don't get confused. */
8409 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8410 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8412 /* OR them together. */
8413 cmp = gen_rtx_SET (VOIDmode, or_result,
8414 gen_rtx_COMPARE (CCEQmode,
8415 gen_rtx_IOR (SImode, or1, or2),
8416 const_true_rtx));
8417 compare_result = or_result;
8418 code = EQ;
8420 else
8422 /* We only care about 1 bit (x1xx), so map everything to NE to
8423 maintain rtl sanity. We'll get to the right bit (x1xx) at
8424 code output time. */
8425 if (code == NE || code == LTGT)
8426 /* Do the inverse here because we have no cmpne
8427 instruction. We use the cmpeq instruction and expect
8428 to get a 0 instead. */
8429 code = EQ;
8430 else
8431 code = NE;
8434 emit_insn (cmp);
8436 else
8437 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8438 gen_rtx_COMPARE (comp_mode,
8439 rs6000_compare_op0,
8440 rs6000_compare_op1)));
8442 /* Some kinds of FP comparisons need an OR operation;
8443 except for flag_unsafe_math_optimizations we don't bother. */
8444 if (rs6000_compare_fp_p
8445 && ! flag_unsafe_math_optimizations
8446 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8447 && (code == LE || code == GE
8448 || code == UNEQ || code == LTGT
8449 || code == UNGT || code == UNLT))
8451 enum rtx_code or1, or2;
8452 rtx or1_rtx, or2_rtx, compare2_rtx;
8453 rtx or_result = gen_reg_rtx (CCEQmode);
8455 switch (code)
8457 case LE: or1 = LT; or2 = EQ; break;
8458 case GE: or1 = GT; or2 = EQ; break;
8459 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8460 case LTGT: or1 = LT; or2 = GT; break;
8461 case UNGT: or1 = UNORDERED; or2 = GT; break;
8462 case UNLT: or1 = UNORDERED; or2 = LT; break;
8463 default: abort ();
8465 validate_condition_mode (or1, comp_mode);
8466 validate_condition_mode (or2, comp_mode);
8467 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8468 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8469 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8470 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8471 const_true_rtx);
8472 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8474 compare_result = or_result;
8475 code = EQ;
8478 validate_condition_mode (code, GET_MODE (compare_result));
8480 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8484 /* Emit the RTL for an sCOND pattern. */
8486 void
8487 rs6000_emit_sCOND (code, result)
8488 enum rtx_code code;
8489 rtx result;
8491 rtx condition_rtx;
8492 enum machine_mode op_mode;
8494 condition_rtx = rs6000_generate_compare (code);
8496 op_mode = GET_MODE (rs6000_compare_op0);
8497 if (op_mode == VOIDmode)
8498 op_mode = GET_MODE (rs6000_compare_op1);
8500 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8502 PUT_MODE (condition_rtx, DImode);
8503 convert_move (result, condition_rtx, 0);
8505 else
8507 PUT_MODE (condition_rtx, SImode);
8508 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8512 /* Emit a branch of kind CODE to location LOC. */
8514 void
8515 rs6000_emit_cbranch (code, loc)
8516 enum rtx_code code;
8517 rtx loc;
8519 rtx condition_rtx, loc_ref;
8521 condition_rtx = rs6000_generate_compare (code);
8522 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8523 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8524 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8525 loc_ref, pc_rtx)));
8528 /* Return the string to output a conditional branch to LABEL, which is
8529 the operand number of the label, or -1 if the branch is really a
8530 conditional return.
8532 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8533 condition code register and its mode specifies what kind of
8534 comparison we made.
8536 REVERSED is nonzero if we should reverse the sense of the comparison.
8538 INSN is the insn. */
8540 char *
8541 output_cbranch (op, label, reversed, insn)
8542 rtx op;
8543 const char * label;
8544 int reversed;
8545 rtx insn;
8547 static char string[64];
8548 enum rtx_code code = GET_CODE (op);
8549 rtx cc_reg = XEXP (op, 0);
8550 enum machine_mode mode = GET_MODE (cc_reg);
8551 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8552 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8553 int really_reversed = reversed ^ need_longbranch;
8554 char *s = string;
8555 const char *ccode;
8556 const char *pred;
8557 rtx note;
8559 validate_condition_mode (code, mode);
8561 /* Work out which way this really branches. We could use
8562 reverse_condition_maybe_unordered here always but this
8563 makes the resulting assembler clearer. */
8564 if (really_reversed)
8566 /* Reversal of FP compares takes care -- an ordered compare
8567 becomes an unordered compare and vice versa. */
8568 if (mode == CCFPmode)
8569 code = reverse_condition_maybe_unordered (code);
8570 else
8571 code = reverse_condition (code);
8574 if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
8576 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8577 to the GT bit. */
8578 if (code == EQ)
8579 /* Opposite of GT. */
8580 code = UNLE;
8581 else if (code == NE)
8582 code = GT;
8583 else
8584 abort ();
8587 switch (code)
8589 /* Not all of these are actually distinct opcodes, but
8590 we distinguish them for clarity of the resulting assembler. */
8591 case NE: case LTGT:
8592 ccode = "ne"; break;
8593 case EQ: case UNEQ:
8594 ccode = "eq"; break;
8595 case GE: case GEU:
8596 ccode = "ge"; break;
8597 case GT: case GTU: case UNGT:
8598 ccode = "gt"; break;
8599 case LE: case LEU:
8600 ccode = "le"; break;
8601 case LT: case LTU: case UNLT:
8602 ccode = "lt"; break;
8603 case UNORDERED: ccode = "un"; break;
8604 case ORDERED: ccode = "nu"; break;
8605 case UNGE: ccode = "nl"; break;
8606 case UNLE: ccode = "ng"; break;
8607 default:
8608 abort ();
8611 /* Maybe we have a guess as to how likely the branch is.
8612 The old mnemonics don't have a way to specify this information. */
8613 pred = "";
8614 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8615 if (note != NULL_RTX)
8617 /* PROB is the difference from 50%. */
8618 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8619 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
8621 /* Only hint for highly probable/improbable branches on newer
8622 cpus as static prediction overrides processor dynamic
8623 prediction. For older cpus we may as well always hint, but
8624 assume not taken for branches that are very close to 50% as a
8625 mispredicted taken branch is more expensive than a
8626 mispredicted not-taken branch. */
8627 if (always_hint
8628 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
8630 if (abs (prob) > REG_BR_PROB_BASE / 20
8631 && ((prob > 0) ^ need_longbranch))
8632 pred = "+";
8633 else
8634 pred = "-";
8638 if (label == NULL)
8639 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8640 else
8641 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8643 /* We need to escape any '%' characters in the reg_names string.
8644 Assume they'd only be the first character... */
8645 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8646 *s++ = '%';
8647 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8649 if (label != NULL)
8651 /* If the branch distance was too far, we may have to use an
8652 unconditional branch to go the distance. */
8653 if (need_longbranch)
8654 s += sprintf (s, ",$+8\n\tb %s", label);
8655 else
8656 s += sprintf (s, ",%s", label);
8659 return string;
8662 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8663 operands of the last comparison is nonzero/true, FALSE_COND if it
8664 is zero/false. Return 0 if the hardware has no such operation. */
8667 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8668 rtx dest;
8669 rtx op;
8670 rtx true_cond;
8671 rtx false_cond;
8673 enum rtx_code code = GET_CODE (op);
8674 rtx op0 = rs6000_compare_op0;
8675 rtx op1 = rs6000_compare_op1;
8676 REAL_VALUE_TYPE c1;
8677 enum machine_mode compare_mode = GET_MODE (op0);
8678 enum machine_mode result_mode = GET_MODE (dest);
8679 rtx temp;
8681 /* These modes should always match. */
8682 if (GET_MODE (op1) != compare_mode
8683 /* In the isel case however, we can use a compare immediate, so
8684 op1 may be a small constant. */
8685 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8686 return 0;
8687 if (GET_MODE (true_cond) != result_mode)
8688 return 0;
8689 if (GET_MODE (false_cond) != result_mode)
8690 return 0;
8692 /* First, work out if the hardware can do this at all, or
8693 if it's too slow... */
8694 if (! rs6000_compare_fp_p)
8696 if (TARGET_ISEL)
8697 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8698 return 0;
8701 /* Eliminate half of the comparisons by switching operands, this
8702 makes the remaining code simpler. */
8703 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8704 || code == LTGT || code == LT)
8706 code = reverse_condition_maybe_unordered (code);
8707 temp = true_cond;
8708 true_cond = false_cond;
8709 false_cond = temp;
8712 /* UNEQ and LTGT take four instructions for a comparison with zero,
8713 it'll probably be faster to use a branch here too. */
8714 if (code == UNEQ)
8715 return 0;
8717 if (GET_CODE (op1) == CONST_DOUBLE)
8718 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8720 /* We're going to try to implement comparisons by performing
8721 a subtract, then comparing against zero. Unfortunately,
8722 Inf - Inf is NaN which is not zero, and so if we don't
8723 know that the operand is finite and the comparison
8724 would treat EQ different to UNORDERED, we can't do it. */
8725 if (! flag_unsafe_math_optimizations
8726 && code != GT && code != UNGE
8727 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8728 /* Constructs of the form (a OP b ? a : b) are safe. */
8729 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8730 || (! rtx_equal_p (op0, true_cond)
8731 && ! rtx_equal_p (op1, true_cond))))
8732 return 0;
8733 /* At this point we know we can use fsel. */
8735 /* Reduce the comparison to a comparison against zero. */
8736 temp = gen_reg_rtx (compare_mode);
8737 emit_insn (gen_rtx_SET (VOIDmode, temp,
8738 gen_rtx_MINUS (compare_mode, op0, op1)));
8739 op0 = temp;
8740 op1 = CONST0_RTX (compare_mode);
8742 /* If we don't care about NaNs we can reduce some of the comparisons
8743 down to faster ones. */
8744 if (flag_unsafe_math_optimizations)
8745 switch (code)
8747 case GT:
8748 code = LE;
8749 temp = true_cond;
8750 true_cond = false_cond;
8751 false_cond = temp;
8752 break;
8753 case UNGE:
8754 code = GE;
8755 break;
8756 case UNEQ:
8757 code = EQ;
8758 break;
8759 default:
8760 break;
8763 /* Now, reduce everything down to a GE. */
8764 switch (code)
8766 case GE:
8767 break;
8769 case LE:
8770 temp = gen_reg_rtx (compare_mode);
8771 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8772 op0 = temp;
8773 break;
8775 case ORDERED:
8776 temp = gen_reg_rtx (compare_mode);
8777 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8778 op0 = temp;
8779 break;
8781 case EQ:
8782 temp = gen_reg_rtx (compare_mode);
8783 emit_insn (gen_rtx_SET (VOIDmode, temp,
8784 gen_rtx_NEG (compare_mode,
8785 gen_rtx_ABS (compare_mode, op0))));
8786 op0 = temp;
8787 break;
8789 case UNGE:
8790 temp = gen_reg_rtx (result_mode);
8791 emit_insn (gen_rtx_SET (VOIDmode, temp,
8792 gen_rtx_IF_THEN_ELSE (result_mode,
8793 gen_rtx_GE (VOIDmode,
8794 op0, op1),
8795 true_cond, false_cond)));
8796 false_cond = temp;
8797 true_cond = false_cond;
8799 temp = gen_reg_rtx (compare_mode);
8800 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8801 op0 = temp;
8802 break;
8804 case GT:
8805 temp = gen_reg_rtx (result_mode);
8806 emit_insn (gen_rtx_SET (VOIDmode, temp,
8807 gen_rtx_IF_THEN_ELSE (result_mode,
8808 gen_rtx_GE (VOIDmode,
8809 op0, op1),
8810 true_cond, false_cond)));
8811 true_cond = temp;
8812 false_cond = true_cond;
8814 temp = gen_reg_rtx (compare_mode);
8815 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8816 op0 = temp;
8817 break;
8819 default:
8820 abort ();
8823 emit_insn (gen_rtx_SET (VOIDmode, dest,
8824 gen_rtx_IF_THEN_ELSE (result_mode,
8825 gen_rtx_GE (VOIDmode,
8826 op0, op1),
8827 true_cond, false_cond)));
8828 return 1;
8831 /* Same as above, but for ints (isel). */
8833 static int
8834 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8835 rtx dest;
8836 rtx op;
8837 rtx true_cond;
8838 rtx false_cond;
8840 rtx condition_rtx, cr;
8842 /* All isel implementations thus far are 32-bits. */
8843 if (GET_MODE (rs6000_compare_op0) != SImode)
8844 return 0;
8846 /* We still have to do the compare, because isel doesn't do a
8847 compare, it just looks at the CRx bits set by a previous compare
8848 instruction. */
8849 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8850 cr = XEXP (condition_rtx, 0);
8852 if (GET_MODE (cr) == CCmode)
8853 emit_insn (gen_isel_signed (dest, condition_rtx,
8854 true_cond, false_cond, cr));
8855 else
8856 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8857 true_cond, false_cond, cr));
8859 return 1;
8862 const char *
8863 output_isel (operands)
8864 rtx *operands;
8866 enum rtx_code code;
8868 code = GET_CODE (operands[1]);
8869 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8871 PUT_CODE (operands[1], reverse_condition (code));
8872 return "isel %0,%3,%2,%j1";
8874 else
8875 return "isel %0,%2,%3,%j1";
8878 void
8879 rs6000_emit_minmax (dest, code, op0, op1)
8880 rtx dest;
8881 enum rtx_code code;
8882 rtx op0;
8883 rtx op1;
8885 enum machine_mode mode = GET_MODE (op0);
8886 enum rtx_code c;
8887 rtx target;
8889 if (code == SMAX || code == SMIN)
8890 c = GE;
8891 else
8892 c = GEU;
8894 if (code == SMAX || code == UMAX)
8895 target = emit_conditional_move (dest, c, op0, op1, mode,
8896 op0, op1, mode, 0);
8897 else
8898 target = emit_conditional_move (dest, c, op0, op1, mode,
8899 op1, op0, mode, 0);
8900 if (target == NULL_RTX)
8901 abort ();
8902 if (target != dest)
8903 emit_move_insn (dest, target);
8906 /* This page contains routines that are used to determine what the
8907 function prologue and epilogue code will do and write them out. */
8909 /* Return the first fixed-point register that is required to be
8910 saved. 32 if none. */
8913 first_reg_to_save ()
8915 int first_reg;
8917 /* Find lowest numbered live register. */
8918 for (first_reg = 13; first_reg <= 31; first_reg++)
8919 if (regs_ever_live[first_reg]
8920 && (! call_used_regs[first_reg]
8921 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8922 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8923 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8924 break;
8926 #if TARGET_MACHO
8927 if (flag_pic
8928 && current_function_uses_pic_offset_table
8929 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8930 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8931 #endif
8933 return first_reg;
8936 /* Similar, for FP regs. */
8939 first_fp_reg_to_save ()
8941 int first_reg;
8943 /* Find lowest numbered live register. */
8944 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8945 if (regs_ever_live[first_reg])
8946 break;
8948 return first_reg;
8951 /* Similar, for AltiVec regs. */
8953 static int
8954 first_altivec_reg_to_save ()
8956 int i;
8958 /* Stack frame remains as is unless we are in AltiVec ABI. */
8959 if (! TARGET_ALTIVEC_ABI)
8960 return LAST_ALTIVEC_REGNO + 1;
8962 /* Find lowest numbered live register. */
8963 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8964 if (regs_ever_live[i])
8965 break;
8967 return i;
8970 /* Return a 32-bit mask of the AltiVec registers we need to set in
8971 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8972 the 32-bit word is 0. */
8974 static unsigned int
8975 compute_vrsave_mask ()
8977 unsigned int i, mask = 0;
8979 /* First, find out if we use _any_ altivec registers. */
8980 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8981 if (regs_ever_live[i])
8982 mask |= ALTIVEC_REG_BIT (i);
8984 if (mask == 0)
8985 return mask;
8987 /* Next, add all registers that are call-clobbered. We do this
8988 because post-reload register optimizers such as regrename_optimize
8989 may choose to use them. They never change the register class
8990 chosen by reload, so cannot create new uses of altivec registers
8991 if there were none before, so the early exit above is safe. */
8992 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8993 altivec registers not saved in the mask, which might well make the
8994 adjustments below more effective in eliding the save/restore of
8995 VRSAVE in small functions. */
8996 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8997 if (call_used_regs[i])
8998 mask |= ALTIVEC_REG_BIT (i);
9000 /* Next, remove the argument registers from the set. These must
9001 be in the VRSAVE mask set by the caller, so we don't need to add
9002 them in again. More importantly, the mask we compute here is
9003 used to generate CLOBBERs in the set_vrsave insn, and we do not
9004 wish the argument registers to die. */
9005 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9006 mask &= ~ALTIVEC_REG_BIT (i);
9008 /* Similarly, remove the return value from the set. */
9010 bool yes = false;
9011 diddle_return_value (is_altivec_return_reg, &yes);
9012 if (yes)
9013 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9016 return mask;
9019 static void
9020 is_altivec_return_reg (reg, xyes)
9021 rtx reg;
9022 void *xyes;
9024 bool *yes = (bool *) xyes;
9025 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9026 *yes = true;
9030 /* Calculate the stack information for the current function. This is
9031 complicated by having two separate calling sequences, the AIX calling
9032 sequence and the V.4 calling sequence.
9034 AIX (and Darwin/Mac OS X) stack frames look like:
9035 32-bit 64-bit
9036 SP----> +---------------------------------------+
9037 | back chain to caller | 0 0
9038 +---------------------------------------+
9039 | saved CR | 4 8 (8-11)
9040 +---------------------------------------+
9041 | saved LR | 8 16
9042 +---------------------------------------+
9043 | reserved for compilers | 12 24
9044 +---------------------------------------+
9045 | reserved for binders | 16 32
9046 +---------------------------------------+
9047 | saved TOC pointer | 20 40
9048 +---------------------------------------+
9049 | Parameter save area (P) | 24 48
9050 +---------------------------------------+
9051 | Alloca space (A) | 24+P etc.
9052 +---------------------------------------+
9053 | Local variable space (L) | 24+P+A
9054 +---------------------------------------+
9055 | Float/int conversion temporary (X) | 24+P+A+L
9056 +---------------------------------------+
9057 | Save area for AltiVec registers (W) | 24+P+A+L+X
9058 +---------------------------------------+
9059 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9060 +---------------------------------------+
9061 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9062 +---------------------------------------+
9063 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9064 +---------------------------------------+
9065 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9066 +---------------------------------------+
9067 old SP->| back chain to caller's caller |
9068 +---------------------------------------+
9070 The required alignment for AIX configurations is two words (i.e., 8
9071 or 16 bytes).
9074 V.4 stack frames look like:
9076 SP----> +---------------------------------------+
9077 | back chain to caller | 0
9078 +---------------------------------------+
9079 | caller's saved LR | 4
9080 +---------------------------------------+
9081 | Parameter save area (P) | 8
9082 +---------------------------------------+
9083 | Alloca space (A) | 8+P
9084 +---------------------------------------+
9085 | Varargs save area (V) | 8+P+A
9086 +---------------------------------------+
9087 | Local variable space (L) | 8+P+A+V
9088 +---------------------------------------+
9089 | Float/int conversion temporary (X) | 8+P+A+V+L
9090 +---------------------------------------+
9091 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9092 +---------------------------------------+
9093 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9094 +---------------------------------------+
9095 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9096 +---------------------------------------+
9097 | SPE: area for 64-bit GP registers |
9098 +---------------------------------------+
9099 | SPE alignment padding |
9100 +---------------------------------------+
9101 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9102 +---------------------------------------+
9103 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9104 +---------------------------------------+
9105 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9106 +---------------------------------------+
9107 old SP->| back chain to caller's caller |
9108 +---------------------------------------+
9110 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9111 given. (But note below and in sysv4.h that we require only 8 and
9112 may round up the size of our stack frame anyways. The historical
9113 reason is early versions of powerpc-linux which didn't properly
9114 align the stack at program startup. A happy side-effect is that
9115 -mno-eabi libraries can be used with -meabi programs.)
9117 The EABI configuration defaults to the V.4 layout, unless
9118 -mcall-aix is used, in which case the AIX layout is used. However,
9119 the stack alignment requirements may differ. If -mno-eabi is not
9120 given, the required stack alignment is 8 bytes; if -mno-eabi is
9121 given, the required alignment is 16 bytes. (But see V.4 comment
9122 above.) */
9124 #ifndef ABI_STACK_BOUNDARY
9125 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9126 #endif
9128 rs6000_stack_t *
9129 rs6000_stack_info ()
9131 static rs6000_stack_t info, zero_info;
9132 rs6000_stack_t *info_ptr = &info;
9133 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9134 enum rs6000_abi abi;
9135 int ehrd_size;
9136 int total_raw_size;
9138 /* Zero all fields portably. */
9139 info = zero_info;
9141 /* Select which calling sequence. */
9142 info_ptr->abi = abi = DEFAULT_ABI;
9144 /* Calculate which registers need to be saved & save area size. */
9145 info_ptr->first_gp_reg_save = first_reg_to_save ();
9146 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9147 even if it currently looks like we won't. */
9148 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9149 || (flag_pic == 1 && abi == ABI_V4)
9150 || (flag_pic && abi == ABI_DARWIN))
9151 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9152 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9153 else
9154 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9156 /* For the SPE, we have an additional upper 32-bits on each GPR.
9157 Ideally we should save the entire 64-bits only when the upper
9158 half is used in SIMD instructions. Since we only record
9159 registers live (not the size they are used in), this proves
9160 difficult because we'd have to traverse the instruction chain at
9161 the right time, taking reload into account. This is a real pain,
9162 so we opt to save the GPRs in 64-bits always. Anyone overly
9163 concerned with frame size can fix this. ;-).
9165 So... since we save all GPRs (except the SP) in 64-bits, the
9166 traditional GP save area will be empty. */
9167 if (TARGET_SPE_ABI)
9168 info_ptr->gp_size = 0;
9170 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9171 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9173 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9174 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9175 - info_ptr->first_altivec_reg_save);
9177 /* Does this function call anything? */
9178 info_ptr->calls_p = (! current_function_is_leaf
9179 || cfun->machine->ra_needs_full_frame);
9181 /* Determine if we need to save the link register. */
9182 if (rs6000_ra_ever_killed ()
9183 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
9184 #ifdef TARGET_RELOCATABLE
9185 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9186 #endif
9187 || (info_ptr->first_fp_reg_save != 64
9188 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9189 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9190 || (abi == ABI_V4 && current_function_calls_alloca)
9191 || (DEFAULT_ABI == ABI_DARWIN
9192 && flag_pic
9193 && current_function_uses_pic_offset_table)
9194 || info_ptr->calls_p)
9196 info_ptr->lr_save_p = 1;
9197 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9200 /* Determine if we need to save the condition code registers. */
9201 if (regs_ever_live[CR2_REGNO]
9202 || regs_ever_live[CR3_REGNO]
9203 || regs_ever_live[CR4_REGNO])
9205 info_ptr->cr_save_p = 1;
9206 if (abi == ABI_V4)
9207 info_ptr->cr_size = reg_size;
9210 /* If the current function calls __builtin_eh_return, then we need
9211 to allocate stack space for registers that will hold data for
9212 the exception handler. */
9213 if (current_function_calls_eh_return)
9215 unsigned int i;
9216 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9217 continue;
9219 /* SPE saves EH registers in 64-bits. */
9220 ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9222 else
9223 ehrd_size = 0;
9225 /* Determine various sizes. */
9226 info_ptr->reg_size = reg_size;
9227 info_ptr->fixed_size = RS6000_SAVE_AREA;
9228 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9229 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9230 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9233 if (TARGET_SPE_ABI)
9234 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9235 else
9236 info_ptr->spe_gp_size = 0;
9238 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9240 info_ptr->vrsave_mask = compute_vrsave_mask ();
9241 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9243 else
9245 info_ptr->vrsave_mask = 0;
9246 info_ptr->vrsave_size = 0;
9249 /* Calculate the offsets. */
9250 switch (abi)
9252 case ABI_NONE:
9253 default:
9254 abort ();
9256 case ABI_AIX:
9257 case ABI_AIX_NODESC:
9258 case ABI_DARWIN:
9259 info_ptr->fp_save_offset = - info_ptr->fp_size;
9260 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9262 if (TARGET_ALTIVEC_ABI)
9264 info_ptr->vrsave_save_offset
9265 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9267 /* Align stack so vector save area is on a quadword boundary. */
9268 if (info_ptr->altivec_size != 0)
9269 info_ptr->altivec_padding_size
9270 = 16 - (-info_ptr->vrsave_save_offset % 16);
9271 else
9272 info_ptr->altivec_padding_size = 0;
9274 info_ptr->altivec_save_offset
9275 = info_ptr->vrsave_save_offset
9276 - info_ptr->altivec_padding_size
9277 - info_ptr->altivec_size;
9279 /* Adjust for AltiVec case. */
9280 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9282 else
9283 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9284 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9285 info_ptr->lr_save_offset = 2*reg_size;
9286 break;
9288 case ABI_V4:
9289 info_ptr->fp_save_offset = - info_ptr->fp_size;
9290 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9291 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9293 if (TARGET_SPE_ABI)
9295 /* Align stack so SPE GPR save area is aligned on a
9296 double-word boundary. */
9297 if (info_ptr->spe_gp_size != 0)
9298 info_ptr->spe_padding_size
9299 = 8 - (-info_ptr->cr_save_offset % 8);
9300 else
9301 info_ptr->spe_padding_size = 0;
9303 info_ptr->spe_gp_save_offset
9304 = info_ptr->cr_save_offset
9305 - info_ptr->spe_padding_size
9306 - info_ptr->spe_gp_size;
9308 /* Adjust for SPE case. */
9309 info_ptr->toc_save_offset
9310 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9312 else if (TARGET_ALTIVEC_ABI)
9314 info_ptr->vrsave_save_offset
9315 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9317 /* Align stack so vector save area is on a quadword boundary. */
9318 if (info_ptr->altivec_size != 0)
9319 info_ptr->altivec_padding_size
9320 = 16 - (-info_ptr->vrsave_save_offset % 16);
9321 else
9322 info_ptr->altivec_padding_size = 0;
9324 info_ptr->altivec_save_offset
9325 = info_ptr->vrsave_save_offset
9326 - info_ptr->altivec_padding_size
9327 - info_ptr->altivec_size;
9329 /* Adjust for AltiVec case. */
9330 info_ptr->toc_save_offset
9331 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9333 else
9334 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9335 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9336 info_ptr->lr_save_offset = reg_size;
9337 break;
9340 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9341 + info_ptr->gp_size
9342 + info_ptr->altivec_size
9343 + info_ptr->altivec_padding_size
9344 + info_ptr->vrsave_size
9345 + info_ptr->spe_gp_size
9346 + info_ptr->spe_padding_size
9347 + ehrd_size
9348 + info_ptr->cr_size
9349 + info_ptr->lr_size
9350 + info_ptr->vrsave_size
9351 + info_ptr->toc_size,
9352 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9353 ? 16 : 8);
9355 total_raw_size = (info_ptr->vars_size
9356 + info_ptr->parm_size
9357 + info_ptr->save_size
9358 + info_ptr->varargs_size
9359 + info_ptr->fixed_size);
9361 info_ptr->total_size =
9362 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9364 /* Determine if we need to allocate any stack frame:
9366 For AIX we need to push the stack if a frame pointer is needed
9367 (because the stack might be dynamically adjusted), if we are
9368 debugging, if we make calls, or if the sum of fp_save, gp_save,
9369 and local variables are more than the space needed to save all
9370 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9371 + 18*8 = 288 (GPR13 reserved).
9373 For V.4 we don't have the stack cushion that AIX uses, but assume
9374 that the debugger can handle stackless frames. */
9376 if (info_ptr->calls_p)
9377 info_ptr->push_p = 1;
9379 else if (abi == ABI_V4)
9380 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9382 else
9383 info_ptr->push_p = (frame_pointer_needed
9384 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
9385 || ((total_raw_size - info_ptr->fixed_size)
9386 > (TARGET_32BIT ? 220 : 288)));
9388 /* Zero offsets if we're not saving those registers. */
9389 if (info_ptr->fp_size == 0)
9390 info_ptr->fp_save_offset = 0;
9392 if (info_ptr->gp_size == 0)
9393 info_ptr->gp_save_offset = 0;
9395 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9396 info_ptr->altivec_save_offset = 0;
9398 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9399 info_ptr->vrsave_save_offset = 0;
9401 if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
9402 info_ptr->spe_gp_save_offset = 0;
9404 if (! info_ptr->lr_save_p)
9405 info_ptr->lr_save_offset = 0;
9407 if (! info_ptr->cr_save_p)
9408 info_ptr->cr_save_offset = 0;
9410 if (! info_ptr->toc_save_p)
9411 info_ptr->toc_save_offset = 0;
9413 return info_ptr;
9416 void
9417 debug_stack_info (info)
9418 rs6000_stack_t *info;
9420 const char *abi_string;
9422 if (! info)
9423 info = rs6000_stack_info ();
9425 fprintf (stderr, "\nStack information for function %s:\n",
9426 ((current_function_decl && DECL_NAME (current_function_decl))
9427 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
9428 : "<unknown>"));
9430 switch (info->abi)
9432 default: abi_string = "Unknown"; break;
9433 case ABI_NONE: abi_string = "NONE"; break;
9434 case ABI_AIX:
9435 case ABI_AIX_NODESC: abi_string = "AIX"; break;
9436 case ABI_DARWIN: abi_string = "Darwin"; break;
9437 case ABI_V4: abi_string = "V.4"; break;
9440 fprintf (stderr, "\tABI = %5s\n", abi_string);
9442 if (TARGET_ALTIVEC_ABI)
9443 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9445 if (TARGET_SPE_ABI)
9446 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
9448 if (info->first_gp_reg_save != 32)
9449 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9451 if (info->first_fp_reg_save != 64)
9452 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9454 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9455 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9456 info->first_altivec_reg_save);
9458 if (info->lr_save_p)
9459 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9461 if (info->cr_save_p)
9462 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9464 if (info->toc_save_p)
9465 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9467 if (info->vrsave_mask)
9468 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9470 if (info->push_p)
9471 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9473 if (info->calls_p)
9474 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
9476 if (info->gp_save_offset)
9477 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9479 if (info->fp_save_offset)
9480 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9482 if (info->altivec_save_offset)
9483 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9484 info->altivec_save_offset);
9486 if (info->spe_gp_save_offset)
9487 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9488 info->spe_gp_save_offset);
9490 if (info->vrsave_save_offset)
9491 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9492 info->vrsave_save_offset);
9494 if (info->lr_save_offset)
9495 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9497 if (info->cr_save_offset)
9498 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9500 if (info->toc_save_offset)
9501 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9503 if (info->varargs_save_offset)
9504 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
9506 if (info->total_size)
9507 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9509 if (info->varargs_size)
9510 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9512 if (info->vars_size)
9513 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9515 if (info->parm_size)
9516 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9518 if (info->fixed_size)
9519 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9521 if (info->gp_size)
9522 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9524 if (info->spe_gp_size)
9525 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9527 if (info->fp_size)
9528 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9530 if (info->altivec_size)
9531 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9533 if (info->vrsave_size)
9534 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9536 if (info->altivec_padding_size)
9537 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9538 info->altivec_padding_size);
9540 if (info->spe_padding_size)
9541 fprintf (stderr, "\tspe_padding_size = %5d\n",
9542 info->spe_padding_size);
9544 if (info->lr_size)
9545 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9547 if (info->cr_size)
9548 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9550 if (info->toc_size)
9551 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9553 if (info->save_size)
9554 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9556 if (info->reg_size != 4)
9557 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9559 fprintf (stderr, "\n");
9563 rs6000_return_addr (count, frame)
9564 int count;
9565 rtx frame;
9567 /* Currently we don't optimize very well between prolog and body
9568 code and for PIC code the code can be actually quite bad, so
9569 don't try to be too clever here. */
9570 if (count != 0 || flag_pic != 0)
9572 cfun->machine->ra_needs_full_frame = 1;
9574 return
9575 gen_rtx_MEM
9576 (Pmode,
9577 memory_address
9578 (Pmode,
9579 plus_constant (copy_to_reg
9580 (gen_rtx_MEM (Pmode,
9581 memory_address (Pmode, frame))),
9582 RETURN_ADDRESS_OFFSET)));
9585 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9588 /* Say whether a function is a candidate for sibcall handling or not.
9589 We do not allow indirect calls to be optimized into sibling calls.
9590 Also, we can't do it if there are any vector parameters; there's
9591 nowhere to put the VRsave code so it works; note that functions with
9592 vector parameters are required to have a prototype, so the argument
9593 type info must be available here. (The tail recursion case can work
9594 with vector parameters, but there's no way to distinguish here.) */
9595 static bool
9596 rs6000_function_ok_for_sibcall (decl, exp)
9597 tree decl;
9598 tree exp ATTRIBUTE_UNUSED;
9600 tree type;
9601 if (decl)
9603 if (TARGET_ALTIVEC_VRSAVE)
9605 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
9606 type; type = TREE_CHAIN (type))
9608 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9609 return false;
9612 if (DEFAULT_ABI == ABI_DARWIN
9613 || (*targetm.binds_local_p) (decl))
9615 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
9617 if (!lookup_attribute ("longcall", attr_list)
9618 || lookup_attribute ("shortcall", attr_list))
9619 return true;
9622 return false;
9625 static int
9626 rs6000_ra_ever_killed ()
9628 rtx top;
9629 rtx reg;
9630 rtx insn;
9632 /* Irritatingly, there are two kinds of thunks -- those created with
9633 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9634 through the regular part of the compiler. This is a very hacky
9635 way to tell them apart. */
9636 if (current_function_is_thunk && !no_new_pseudos)
9637 return 0;
9639 /* regs_ever_live has LR marked as used if any sibcalls are present,
9640 but this should not force saving and restoring in the
9641 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9642 clobbers LR, so that is inappropriate. */
9644 /* Also, the prologue can generate a store into LR that
9645 doesn't really count, like this:
9647 move LR->R0
9648 bcl to set PIC register
9649 move LR->R31
9650 move R0->LR
9652 When we're called from the epilogue, we need to avoid counting
9653 this as a store. */
9655 push_topmost_sequence ();
9656 top = get_insns ();
9657 pop_topmost_sequence ();
9658 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9660 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9662 if (INSN_P (insn))
9664 if (FIND_REG_INC_NOTE (insn, reg))
9665 return 1;
9666 else if (GET_CODE (insn) == CALL_INSN
9667 && !SIBLING_CALL_P (insn))
9668 return 1;
9669 else if (set_of (reg, insn) != NULL_RTX
9670 && !prologue_epilogue_contains (insn))
9671 return 1;
9674 return 0;
9677 /* Add a REG_MAYBE_DEAD note to the insn. */
9678 static void
9679 rs6000_maybe_dead (insn)
9680 rtx insn;
9682 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9683 const0_rtx,
9684 REG_NOTES (insn));
9687 /* Emit instructions needed to load the TOC register.
9688 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9689 a constant pool; or for SVR4 -fpic. */
9691 void
9692 rs6000_emit_load_toc_table (fromprolog)
9693 int fromprolog;
9695 rtx dest;
9696 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
9698 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9700 rtx temp = (fromprolog
9701 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9702 : gen_reg_rtx (Pmode));
9703 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
9704 rs6000_maybe_dead (emit_move_insn (dest, temp));
9706 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
9708 char buf[30];
9709 rtx tempLR = (fromprolog
9710 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9711 : gen_reg_rtx (Pmode));
9712 rtx temp0 = (fromprolog
9713 ? gen_rtx_REG (Pmode, 0)
9714 : gen_reg_rtx (Pmode));
9715 rtx symF;
9717 /* possibly create the toc section */
9718 if (! toc_initialized)
9720 toc_section ();
9721 function_section (current_function_decl);
9724 if (fromprolog)
9726 rtx symL;
9728 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
9729 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9731 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
9732 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9734 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
9735 symF)));
9736 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9737 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
9738 symL,
9739 symF)));
9741 else
9743 rtx tocsym;
9744 static int reload_toc_labelno = 0;
9746 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
9748 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
9749 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9751 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
9752 symF,
9753 tocsym)));
9754 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9755 rs6000_maybe_dead (emit_move_insn (temp0,
9756 gen_rtx_MEM (Pmode, dest)));
9758 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
9760 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
9762 /* This is for AIX code running in non-PIC ELF32. */
9763 char buf[30];
9764 rtx realsym;
9765 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
9766 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9768 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
9769 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
9771 else if (DEFAULT_ABI == ABI_AIX)
9773 if (TARGET_32BIT)
9774 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
9775 else
9776 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
9778 else
9779 abort ();
9782 int
9783 get_TOC_alias_set ()
9785 static int set = -1;
9786 if (set == -1)
9787 set = new_alias_set ();
9788 return set;
9791 /* This retuns nonzero if the current function uses the TOC. This is
9792 determined by the presence of (unspec ... 7), which is generated by
9793 the various load_toc_* patterns. */
9796 uses_TOC ()
9798 rtx insn;
9800 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9801 if (INSN_P (insn))
9803 rtx pat = PATTERN (insn);
9804 int i;
9806 if (GET_CODE (pat) == PARALLEL)
9807 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9808 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9809 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9810 return 1;
9812 return 0;
9816 create_TOC_reference (symbol)
9817 rtx symbol;
9819 return gen_rtx_PLUS (Pmode,
9820 gen_rtx_REG (Pmode, TOC_REGISTER),
9821 gen_rtx_CONST (Pmode,
9822 gen_rtx_MINUS (Pmode, symbol,
9823 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9826 #if TARGET_AIX
9827 /* __throw will restore its own return address to be the same as the
9828 return address of the function that the throw is being made to.
9829 This is unfortunate, because we want to check the original
9830 return address to see if we need to restore the TOC.
9831 So we have to squirrel it away here.
9832 This is used only in compiling __throw and __rethrow.
9834 Most of this code should be removed by CSE. */
9835 static rtx insn_after_throw;
9837 /* This does the saving... */
9838 void
9839 rs6000_aix_emit_builtin_unwind_init ()
9841 rtx mem;
9842 rtx stack_top = gen_reg_rtx (Pmode);
9843 rtx opcode_addr = gen_reg_rtx (Pmode);
9845 insn_after_throw = gen_reg_rtx (SImode);
9847 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9848 emit_move_insn (stack_top, mem);
9850 mem = gen_rtx_MEM (Pmode,
9851 gen_rtx_PLUS (Pmode, stack_top,
9852 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9853 emit_move_insn (opcode_addr, mem);
9854 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9857 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9858 in _eh.o). Only used on AIX.
9860 The idea is that on AIX, function calls look like this:
9861 bl somefunction-trampoline
9862 lwz r2,20(sp)
9864 and later,
9865 somefunction-trampoline:
9866 stw r2,20(sp)
9867 ... load function address in the count register ...
9868 bctr
9869 or like this, if the linker determines that this is not a cross-module call
9870 and so the TOC need not be restored:
9871 bl somefunction
9873 or like this, if the compiler could determine that this is not a
9874 cross-module call:
9875 bl somefunction
9876 now, the tricky bit here is that register 2 is saved and restored
9877 by the _linker_, so we can't readily generate debugging information
9878 for it. So we need to go back up the call chain looking at the
9879 insns at return addresses to see which calls saved the TOC register
9880 and so see where it gets restored from.
9882 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9883 just before the actual epilogue.
9885 On the bright side, this incurs no space or time overhead unless an
9886 exception is thrown, except for the extra code in libgcc.a.
9888 The parameter STACKSIZE is a register containing (at runtime)
9889 the amount to be popped off the stack in addition to the stack frame
9890 of this routine (which will be __throw or __rethrow, and so is
9891 guaranteed to have a stack frame). */
9893 void
9894 rs6000_emit_eh_toc_restore (stacksize)
9895 rtx stacksize;
9897 rtx top_of_stack;
9898 rtx bottom_of_stack = gen_reg_rtx (Pmode);
9899 rtx tocompare = gen_reg_rtx (SImode);
9900 rtx opcode = gen_reg_rtx (SImode);
9901 rtx opcode_addr = gen_reg_rtx (Pmode);
9902 rtx mem;
9903 rtx loop_start = gen_label_rtx ();
9904 rtx no_toc_restore_needed = gen_label_rtx ();
9905 rtx loop_exit = gen_label_rtx ();
9907 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9908 set_mem_alias_set (mem, rs6000_sr_alias_set);
9909 emit_move_insn (bottom_of_stack, mem);
9911 top_of_stack = expand_binop (Pmode, add_optab,
9912 bottom_of_stack, stacksize,
9913 NULL_RTX, 1, OPTAB_WIDEN);
9915 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
9916 : 0xE8410028, SImode));
9918 if (insn_after_throw == NULL_RTX)
9919 abort ();
9920 emit_move_insn (opcode, insn_after_throw);
9922 emit_note (NULL, NOTE_INSN_LOOP_BEG);
9923 emit_label (loop_start);
9925 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
9926 SImode, NULL_RTX, NULL_RTX,
9927 no_toc_restore_needed);
9929 mem = gen_rtx_MEM (Pmode,
9930 gen_rtx_PLUS (Pmode, bottom_of_stack,
9931 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
9932 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
9934 emit_label (no_toc_restore_needed);
9935 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
9936 Pmode, NULL_RTX, NULL_RTX,
9937 loop_exit);
9939 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
9940 set_mem_alias_set (mem, rs6000_sr_alias_set);
9941 emit_move_insn (bottom_of_stack, mem);
9943 mem = gen_rtx_MEM (Pmode,
9944 gen_rtx_PLUS (Pmode, bottom_of_stack,
9945 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9946 emit_move_insn (opcode_addr, mem);
9947 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
9949 emit_note (NULL, NOTE_INSN_LOOP_CONT);
9950 emit_jump (loop_start);
9951 emit_note (NULL, NOTE_INSN_LOOP_END);
9952 emit_label (loop_exit);
9954 #endif /* TARGET_AIX */
9956 /* This ties together stack memory (MEM with an alias set of
9957 rs6000_sr_alias_set) and the change to the stack pointer. */
9959 static void
9960 rs6000_emit_stack_tie ()
9962 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9964 set_mem_alias_set (mem, rs6000_sr_alias_set);
9965 emit_insn (gen_stack_tie (mem));
9968 /* Emit the correct code for allocating stack space, as insns.
9969 If COPY_R12, make sure a copy of the old frame is left in r12.
9970 The generated code may use hard register 0 as a temporary. */
9972 static void
9973 rs6000_emit_allocate_stack (size, copy_r12)
9974 HOST_WIDE_INT size;
9975 int copy_r12;
9977 rtx insn;
9978 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
9979 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
9980 rtx todec = GEN_INT (-size);
9982 if (current_function_limit_stack)
9984 if (REG_P (stack_limit_rtx)
9985 && REGNO (stack_limit_rtx) > 1
9986 && REGNO (stack_limit_rtx) <= 31)
9988 emit_insn (TARGET_32BIT
9989 ? gen_addsi3 (tmp_reg,
9990 stack_limit_rtx,
9991 GEN_INT (size))
9992 : gen_adddi3 (tmp_reg,
9993 stack_limit_rtx,
9994 GEN_INT (size)));
9996 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
9997 const0_rtx));
9999 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10000 && TARGET_32BIT
10001 && DEFAULT_ABI == ABI_V4)
10003 rtx toload = gen_rtx_CONST (VOIDmode,
10004 gen_rtx_PLUS (Pmode,
10005 stack_limit_rtx,
10006 GEN_INT (size)));
10008 emit_insn (gen_elf_high (tmp_reg, toload));
10009 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10010 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10011 const0_rtx));
10013 else
10014 warning ("stack limit expression is not supported");
10017 if (copy_r12 || ! TARGET_UPDATE)
10018 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10020 if (TARGET_UPDATE)
10022 if (size > 32767)
10024 /* Need a note here so that try_split doesn't get confused. */
10025 if (get_last_insn() == NULL_RTX)
10026 emit_note (0, NOTE_INSN_DELETED);
10027 insn = emit_move_insn (tmp_reg, todec);
10028 try_split (PATTERN (insn), insn, 0);
10029 todec = tmp_reg;
10032 insn = emit_insn (TARGET_32BIT
10033 ? gen_movsi_update (stack_reg, stack_reg,
10034 todec, stack_reg)
10035 : gen_movdi_update (stack_reg, stack_reg,
10036 todec, stack_reg));
10038 else
10040 insn = emit_insn (TARGET_32BIT
10041 ? gen_addsi3 (stack_reg, stack_reg, todec)
10042 : gen_adddi3 (stack_reg, stack_reg, todec));
10043 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10044 gen_rtx_REG (Pmode, 12));
10047 RTX_FRAME_RELATED_P (insn) = 1;
10048 REG_NOTES (insn) =
10049 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10050 gen_rtx_SET (VOIDmode, stack_reg,
10051 gen_rtx_PLUS (Pmode, stack_reg,
10052 GEN_INT (-size))),
10053 REG_NOTES (insn));
10056 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
10057 knows that:
10059 (mem (plus (blah) (regXX)))
10061 is really:
10063 (mem (plus (blah) (const VALUE_OF_REGXX))). */
10065 static void
10066 altivec_frame_fixup (insn, reg, val)
10067 rtx insn, reg;
10068 HOST_WIDE_INT val;
10070 rtx real;
10072 real = copy_rtx (PATTERN (insn));
10074 real = replace_rtx (real, reg, GEN_INT (val));
10076 RTX_FRAME_RELATED_P (insn) = 1;
10077 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10078 real,
10079 REG_NOTES (insn));
10082 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10083 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10084 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10085 deduce these equivalences by itself so it wasn't necessary to hold
10086 its hand so much. */
10088 static void
10089 rs6000_frame_related (insn, reg, val, reg2, rreg)
10090 rtx insn;
10091 rtx reg;
10092 HOST_WIDE_INT val;
10093 rtx reg2;
10094 rtx rreg;
10096 rtx real, temp;
10098 /* copy_rtx will not make unique copies of registers, so we need to
10099 ensure we don't have unwanted sharing here. */
10100 if (reg == reg2)
10101 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10103 if (reg == rreg)
10104 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10106 real = copy_rtx (PATTERN (insn));
10108 if (reg2 != NULL_RTX)
10109 real = replace_rtx (real, reg2, rreg);
10111 real = replace_rtx (real, reg,
10112 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10113 STACK_POINTER_REGNUM),
10114 GEN_INT (val)));
10116 /* We expect that 'real' is either a SET or a PARALLEL containing
10117 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10118 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10120 if (GET_CODE (real) == SET)
10122 rtx set = real;
10124 temp = simplify_rtx (SET_SRC (set));
10125 if (temp)
10126 SET_SRC (set) = temp;
10127 temp = simplify_rtx (SET_DEST (set));
10128 if (temp)
10129 SET_DEST (set) = temp;
10130 if (GET_CODE (SET_DEST (set)) == MEM)
10132 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10133 if (temp)
10134 XEXP (SET_DEST (set), 0) = temp;
10137 else if (GET_CODE (real) == PARALLEL)
10139 int i;
10140 for (i = 0; i < XVECLEN (real, 0); i++)
10141 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10143 rtx set = XVECEXP (real, 0, i);
10145 temp = simplify_rtx (SET_SRC (set));
10146 if (temp)
10147 SET_SRC (set) = temp;
10148 temp = simplify_rtx (SET_DEST (set));
10149 if (temp)
10150 SET_DEST (set) = temp;
10151 if (GET_CODE (SET_DEST (set)) == MEM)
10153 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10154 if (temp)
10155 XEXP (SET_DEST (set), 0) = temp;
10157 RTX_FRAME_RELATED_P (set) = 1;
10160 else
10161 abort ();
10163 RTX_FRAME_RELATED_P (insn) = 1;
10164 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10165 real,
10166 REG_NOTES (insn));
10169 /* Returns an insn that has a vrsave set operation with the
10170 appropriate CLOBBERs. */
10172 static rtx
10173 generate_set_vrsave (reg, info, epiloguep)
10174 rtx reg;
10175 rs6000_stack_t *info;
10176 int epiloguep;
10178 int nclobs, i;
10179 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10180 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10182 clobs[0]
10183 = gen_rtx_SET (VOIDmode,
10184 vrsave,
10185 gen_rtx_UNSPEC_VOLATILE (SImode,
10186 gen_rtvec (2, reg, vrsave),
10187 30));
10189 nclobs = 1;
10191 /* We need to clobber the registers in the mask so the scheduler
10192 does not move sets to VRSAVE before sets of AltiVec registers.
10194 However, if the function receives nonlocal gotos, reload will set
10195 all call saved registers live. We will end up with:
10197 (set (reg 999) (mem))
10198 (parallel [ (set (reg vrsave) (unspec blah))
10199 (clobber (reg 999))])
10201 The clobber will cause the store into reg 999 to be dead, and
10202 flow will attempt to delete an epilogue insn. In this case, we
10203 need an unspec use/set of the register. */
10205 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10206 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10208 if (!epiloguep || call_used_regs [i])
10209 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10210 gen_rtx_REG (V4SImode, i));
10211 else
10213 rtx reg = gen_rtx_REG (V4SImode, i);
10215 clobs[nclobs++]
10216 = gen_rtx_SET (VOIDmode,
10217 reg,
10218 gen_rtx_UNSPEC (V4SImode,
10219 gen_rtvec (1, reg), 27));
10223 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10225 for (i = 0; i < nclobs; ++i)
10226 XVECEXP (insn, 0, i) = clobs[i];
10228 return insn;
10231 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10232 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10234 static void
10235 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10236 rtx frame_reg;
10237 rtx frame_ptr;
10238 enum machine_mode mode;
10239 unsigned int regno;
10240 int offset;
10241 int total_size;
10243 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10244 rtx replacea, replaceb;
10246 int_rtx = GEN_INT (offset);
10248 /* Some cases that need register indexed addressing. */
10249 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10250 || (TARGET_SPE_ABI
10251 && SPE_VECTOR_MODE (mode)
10252 && !SPE_CONST_OFFSET_OK (offset)))
10254 /* Whomever calls us must make sure r11 is available in the
10255 flow path of instructions in the prologue. */
10256 offset_rtx = gen_rtx_REG (Pmode, 11);
10257 emit_move_insn (offset_rtx, int_rtx);
10259 replacea = offset_rtx;
10260 replaceb = int_rtx;
10262 else
10264 offset_rtx = int_rtx;
10265 replacea = NULL_RTX;
10266 replaceb = NULL_RTX;
10269 reg = gen_rtx_REG (mode, regno);
10270 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10271 mem = gen_rtx_MEM (mode, addr);
10272 set_mem_alias_set (mem, rs6000_sr_alias_set);
10274 insn = emit_move_insn (mem, reg);
10276 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10279 /* Emit an offset memory reference suitable for a frame store, while
10280 converting to a valid addressing mode. */
10282 static rtx
10283 gen_frame_mem_offset (mode, reg, offset)
10284 enum machine_mode mode;
10285 rtx reg;
10286 int offset;
10288 rtx int_rtx, offset_rtx;
10290 int_rtx = GEN_INT (offset);
10292 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10294 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10295 emit_move_insn (offset_rtx, int_rtx);
10297 else
10298 offset_rtx = int_rtx;
10300 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10303 /* Emit function prologue as insns. */
10305 void
10306 rs6000_emit_prologue ()
10308 rs6000_stack_t *info = rs6000_stack_info ();
10309 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10310 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10311 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10312 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10313 rtx frame_reg_rtx = sp_reg_rtx;
10314 rtx cr_save_rtx = NULL;
10315 rtx insn;
10316 int saving_FPRs_inline;
10317 int using_store_multiple;
10318 HOST_WIDE_INT sp_offset = 0;
10320 if (TARGET_SPE_ABI)
10322 reg_mode = V2SImode;
10323 reg_size = 8;
10326 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10327 && !TARGET_SPE_ABI
10328 && info->first_gp_reg_save < 31);
10329 saving_FPRs_inline = (info->first_fp_reg_save == 64
10330 || FP_SAVE_INLINE (info->first_fp_reg_save));
10332 /* For V.4, update stack before we do any saving and set back pointer. */
10333 if (info->push_p && DEFAULT_ABI == ABI_V4)
10335 if (info->total_size < 32767)
10336 sp_offset = info->total_size;
10337 else
10338 frame_reg_rtx = frame_ptr_rtx;
10339 rs6000_emit_allocate_stack (info->total_size,
10340 (frame_reg_rtx != sp_reg_rtx
10341 && (info->cr_save_p
10342 || info->lr_save_p
10343 || info->first_fp_reg_save < 64
10344 || info->first_gp_reg_save < 32
10345 )));
10346 if (frame_reg_rtx != sp_reg_rtx)
10347 rs6000_emit_stack_tie ();
10350 /* Save AltiVec registers if needed. */
10351 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10353 int i;
10355 /* There should be a non inline version of this, for when we
10356 are saving lots of vector registers. */
10357 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10358 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10360 rtx areg, savereg, mem;
10361 int offset;
10363 offset = info->altivec_save_offset + sp_offset
10364 + 16 * (i - info->first_altivec_reg_save);
10366 savereg = gen_rtx_REG (V4SImode, i);
10368 areg = gen_rtx_REG (Pmode, 0);
10369 emit_move_insn (areg, GEN_INT (offset));
10371 /* AltiVec addressing mode is [reg+reg]. */
10372 mem = gen_rtx_MEM (V4SImode,
10373 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10375 set_mem_alias_set (mem, rs6000_sr_alias_set);
10377 insn = emit_move_insn (mem, savereg);
10379 altivec_frame_fixup (insn, areg, offset);
10383 /* VRSAVE is a bit vector representing which AltiVec registers
10384 are used. The OS uses this to determine which vector
10385 registers to save on a context switch. We need to save
10386 VRSAVE on the stack frame, add whatever AltiVec registers we
10387 used in this function, and do the corresponding magic in the
10388 epilogue. */
10390 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10392 rtx reg, mem, vrsave;
10393 int offset;
10395 /* Get VRSAVE onto a GPR. */
10396 reg = gen_rtx_REG (SImode, 12);
10397 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10398 if (TARGET_MACHO)
10399 emit_insn (gen_get_vrsave_internal (reg));
10400 else
10401 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10403 /* Save VRSAVE. */
10404 offset = info->vrsave_save_offset + sp_offset;
10406 = gen_rtx_MEM (SImode,
10407 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10408 set_mem_alias_set (mem, rs6000_sr_alias_set);
10409 insn = emit_move_insn (mem, reg);
10411 /* Include the registers in the mask. */
10412 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10414 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10417 /* If we use the link register, get it into r0. */
10418 if (info->lr_save_p)
10419 emit_move_insn (gen_rtx_REG (Pmode, 0),
10420 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10422 /* If we need to save CR, put it into r12. */
10423 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10425 cr_save_rtx = gen_rtx_REG (SImode, 12);
10426 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10429 /* Do any required saving of fpr's. If only one or two to save, do
10430 it ourselves. Otherwise, call function. */
10431 if (saving_FPRs_inline)
10433 int i;
10434 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10435 if ((regs_ever_live[info->first_fp_reg_save+i]
10436 && ! call_used_regs[info->first_fp_reg_save+i]))
10437 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10438 info->first_fp_reg_save + i,
10439 info->fp_save_offset + sp_offset + 8 * i,
10440 info->total_size);
10442 else if (info->first_fp_reg_save != 64)
10444 int i;
10445 char rname[30];
10446 const char *alloc_rname;
10447 rtvec p;
10448 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10450 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10451 gen_rtx_REG (Pmode,
10452 LINK_REGISTER_REGNUM));
10453 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10454 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10455 alloc_rname = ggc_strdup (rname);
10456 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10457 gen_rtx_SYMBOL_REF (Pmode,
10458 alloc_rname));
10459 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10461 rtx addr, reg, mem;
10462 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10463 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10464 GEN_INT (info->fp_save_offset
10465 + sp_offset + 8*i));
10466 mem = gen_rtx_MEM (DFmode, addr);
10467 set_mem_alias_set (mem, rs6000_sr_alias_set);
10469 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10471 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10472 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10473 NULL_RTX, NULL_RTX);
10476 /* Save GPRs. This is done as a PARALLEL if we are using
10477 the store-multiple instructions. */
10478 if (using_store_multiple)
10480 rtvec p;
10481 int i;
10482 p = rtvec_alloc (32 - info->first_gp_reg_save);
10483 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10485 rtx addr, reg, mem;
10486 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10487 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10488 GEN_INT (info->gp_save_offset
10489 + sp_offset
10490 + reg_size * i));
10491 mem = gen_rtx_MEM (reg_mode, addr);
10492 set_mem_alias_set (mem, rs6000_sr_alias_set);
10494 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10496 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10497 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10498 NULL_RTX, NULL_RTX);
10500 else
10502 int i;
10503 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10504 if ((regs_ever_live[info->first_gp_reg_save+i]
10505 && ! call_used_regs[info->first_gp_reg_save+i])
10506 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10507 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10508 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10510 rtx addr, reg, mem;
10511 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10513 if (TARGET_SPE_ABI)
10515 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10516 rtx b;
10518 if (!SPE_CONST_OFFSET_OK (offset))
10520 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10521 emit_move_insn (b, GEN_INT (offset));
10523 else
10524 b = GEN_INT (offset);
10526 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10527 mem = gen_rtx_MEM (V2SImode, addr);
10528 set_mem_alias_set (mem, rs6000_sr_alias_set);
10529 insn = emit_move_insn (mem, reg);
10531 if (GET_CODE (b) == CONST_INT)
10532 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10533 NULL_RTX, NULL_RTX);
10534 else
10535 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10536 b, GEN_INT (offset));
10538 else
10540 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10541 GEN_INT (info->gp_save_offset
10542 + sp_offset
10543 + reg_size * i));
10544 mem = gen_rtx_MEM (reg_mode, addr);
10545 set_mem_alias_set (mem, rs6000_sr_alias_set);
10547 insn = emit_move_insn (mem, reg);
10548 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10549 NULL_RTX, NULL_RTX);
10554 /* ??? There's no need to emit actual instructions here, but it's the
10555 easiest way to get the frame unwind information emitted. */
10556 if (current_function_calls_eh_return)
10558 unsigned int i, regno;
10560 for (i = 0; ; ++i)
10562 regno = EH_RETURN_DATA_REGNO (i);
10563 if (regno == INVALID_REGNUM)
10564 break;
10566 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10567 info->ehrd_offset + sp_offset
10568 + reg_size * (int) i,
10569 info->total_size);
10573 /* Save lr if we used it. */
10574 if (info->lr_save_p)
10576 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10577 GEN_INT (info->lr_save_offset + sp_offset));
10578 rtx reg = gen_rtx_REG (Pmode, 0);
10579 rtx mem = gen_rtx_MEM (Pmode, addr);
10580 /* This should not be of rs6000_sr_alias_set, because of
10581 __builtin_return_address. */
10583 insn = emit_move_insn (mem, reg);
10584 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10585 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10588 /* Save CR if we use any that must be preserved. */
10589 if (info->cr_save_p)
10591 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10592 GEN_INT (info->cr_save_offset + sp_offset));
10593 rtx mem = gen_rtx_MEM (SImode, addr);
10595 set_mem_alias_set (mem, rs6000_sr_alias_set);
10597 /* If r12 was used to hold the original sp, copy cr into r0 now
10598 that it's free. */
10599 if (REGNO (frame_reg_rtx) == 12)
10601 cr_save_rtx = gen_rtx_REG (SImode, 0);
10602 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10604 insn = emit_move_insn (mem, cr_save_rtx);
10606 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10607 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10608 OK. All we have to do is specify that _one_ condition code
10609 register is saved in this stack slot. The thrower's epilogue
10610 will then restore all the call-saved registers.
10611 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10612 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10613 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10616 /* Update stack and set back pointer unless this is V.4,
10617 for which it was done previously. */
10618 if (info->push_p && DEFAULT_ABI != ABI_V4)
10619 rs6000_emit_allocate_stack (info->total_size, FALSE);
10621 /* Set frame pointer, if needed. */
10622 if (frame_pointer_needed)
10624 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10625 sp_reg_rtx);
10626 RTX_FRAME_RELATED_P (insn) = 1;
10629 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10630 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10631 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10632 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10634 /* If emit_load_toc_table will use the link register, we need to save
10635 it. We use R11 for this purpose because emit_load_toc_table
10636 can use register 0. This allows us to use a plain 'blr' to return
10637 from the procedure more often. */
10638 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
10639 && ! info->lr_save_p
10640 && EXIT_BLOCK_PTR->pred != NULL);
10641 if (save_LR_around_toc_setup)
10642 emit_move_insn (gen_rtx_REG (Pmode, 11),
10643 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10645 rs6000_emit_load_toc_table (TRUE);
10647 if (save_LR_around_toc_setup)
10648 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10649 gen_rtx_REG (Pmode, 11));
10652 #if TARGET_MACHO
10653 if (DEFAULT_ABI == ABI_DARWIN
10654 && flag_pic && current_function_uses_pic_offset_table)
10656 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10657 const char *picbase = machopic_function_base_name ();
10658 rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));
10660 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10662 rs6000_maybe_dead (
10663 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10664 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10666 #endif
10669 /* Write function prologue. */
10671 static void
10672 rs6000_output_function_prologue (file, size)
10673 FILE *file;
10674 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10676 rs6000_stack_t *info = rs6000_stack_info ();
10678 if (TARGET_DEBUG_STACK)
10679 debug_stack_info (info);
10681 /* Write .extern for any function we will call to save and restore
10682 fp values. */
10683 if (info->first_fp_reg_save < 64
10684 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10685 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10686 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10687 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10688 RESTORE_FP_SUFFIX);
10690 /* Write .extern for AIX common mode routines, if needed. */
10691 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10693 fputs ("\t.extern __mulh\n", file);
10694 fputs ("\t.extern __mull\n", file);
10695 fputs ("\t.extern __divss\n", file);
10696 fputs ("\t.extern __divus\n", file);
10697 fputs ("\t.extern __quoss\n", file);
10698 fputs ("\t.extern __quous\n", file);
10699 common_mode_defined = 1;
10702 if (! HAVE_prologue)
10704 start_sequence ();
10706 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10707 the "toplevel" insn chain. */
10708 emit_note (0, NOTE_INSN_DELETED);
10709 rs6000_emit_prologue ();
10710 emit_note (0, NOTE_INSN_DELETED);
10712 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10714 rtx insn;
10715 unsigned addr = 0;
10716 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10718 INSN_ADDRESSES_NEW (insn, addr);
10719 addr += 4;
10723 if (TARGET_DEBUG_STACK)
10724 debug_rtx_list (get_insns (), 100);
10725 final (get_insns (), file, FALSE, FALSE);
10726 end_sequence ();
10729 rs6000_pic_labelno++;
10732 /* Emit function epilogue as insns.
10734 At present, dwarf2out_frame_debug_expr doesn't understand
10735 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10736 anywhere in the epilogue. Most of the insns below would in any case
10737 need special notes to explain where r11 is in relation to the stack. */
10739 void
10740 rs6000_emit_epilogue (sibcall)
10741 int sibcall;
10743 rs6000_stack_t *info;
10744 int restoring_FPRs_inline;
10745 int using_load_multiple;
10746 int using_mfcr_multiple;
10747 int use_backchain_to_restore_sp;
10748 int sp_offset = 0;
10749 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
10750 rtx frame_reg_rtx = sp_reg_rtx;
10751 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10752 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10753 int i;
10755 if (TARGET_SPE_ABI)
10757 reg_mode = V2SImode;
10758 reg_size = 8;
10761 info = rs6000_stack_info ();
10762 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10763 && !TARGET_SPE_ABI
10764 && info->first_gp_reg_save < 31);
10765 restoring_FPRs_inline = (sibcall
10766 || current_function_calls_eh_return
10767 || info->first_fp_reg_save == 64
10768 || FP_SAVE_INLINE (info->first_fp_reg_save));
10769 use_backchain_to_restore_sp = (frame_pointer_needed
10770 || current_function_calls_alloca
10771 || info->total_size > 32767);
10772 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
10773 || rs6000_cpu == PROCESSOR_PPC603
10774 || rs6000_cpu == PROCESSOR_PPC750
10775 || optimize_size);
10777 /* If we have a frame pointer, a call to alloca, or a large stack
10778 frame, restore the old stack pointer using the backchain. Otherwise,
10779 we know what size to update it with. */
10780 if (use_backchain_to_restore_sp)
10782 /* Under V.4, don't reset the stack pointer until after we're done
10783 loading the saved registers. */
10784 if (DEFAULT_ABI == ABI_V4)
10785 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
10787 emit_move_insn (frame_reg_rtx,
10788 gen_rtx_MEM (Pmode, sp_reg_rtx));
10791 else if (info->push_p)
10793 if (DEFAULT_ABI == ABI_V4)
10794 sp_offset = info->total_size;
10795 else
10797 emit_insn (TARGET_32BIT
10798 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10799 GEN_INT (info->total_size))
10800 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10801 GEN_INT (info->total_size)));
10805 /* Restore AltiVec registers if needed. */
10806 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10808 int i;
10810 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10811 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10813 rtx addr, areg, mem;
10815 areg = gen_rtx_REG (Pmode, 0);
10816 emit_move_insn
10817 (areg, GEN_INT (info->altivec_save_offset
10818 + sp_offset
10819 + 16 * (i - info->first_altivec_reg_save)));
10821 /* AltiVec addressing mode is [reg+reg]. */
10822 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
10823 mem = gen_rtx_MEM (V4SImode, addr);
10824 set_mem_alias_set (mem, rs6000_sr_alias_set);
10826 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
10830 /* Restore VRSAVE if needed. */
10831 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
10833 rtx addr, mem, reg;
10835 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10836 GEN_INT (info->vrsave_save_offset + sp_offset));
10837 mem = gen_rtx_MEM (SImode, addr);
10838 set_mem_alias_set (mem, rs6000_sr_alias_set);
10839 reg = gen_rtx_REG (SImode, 12);
10840 emit_move_insn (reg, mem);
10842 emit_insn (generate_set_vrsave (reg, info, 1));
10845 /* Get the old lr if we saved it. */
10846 if (info->lr_save_p)
10848 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
10849 info->lr_save_offset + sp_offset);
10851 set_mem_alias_set (mem, rs6000_sr_alias_set);
10853 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
10856 /* Get the old cr if we saved it. */
10857 if (info->cr_save_p)
10859 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10860 GEN_INT (info->cr_save_offset + sp_offset));
10861 rtx mem = gen_rtx_MEM (SImode, addr);
10863 set_mem_alias_set (mem, rs6000_sr_alias_set);
10865 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
10868 /* Set LR here to try to overlap restores below. */
10869 if (info->lr_save_p)
10870 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10871 gen_rtx_REG (Pmode, 0));
10873 /* Load exception handler data registers, if needed. */
10874 if (current_function_calls_eh_return)
10876 unsigned int i, regno;
10878 for (i = 0; ; ++i)
10880 rtx mem;
10882 regno = EH_RETURN_DATA_REGNO (i);
10883 if (regno == INVALID_REGNUM)
10884 break;
10886 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
10887 info->ehrd_offset + sp_offset
10888 + reg_size * (int) i);
10889 set_mem_alias_set (mem, rs6000_sr_alias_set);
10891 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
10895 /* Restore GPRs. This is done as a PARALLEL if we are using
10896 the load-multiple instructions. */
10897 if (using_load_multiple)
10899 rtvec p;
10900 p = rtvec_alloc (32 - info->first_gp_reg_save);
10901 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10903 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10904 GEN_INT (info->gp_save_offset
10905 + sp_offset
10906 + reg_size * i));
10907 rtx mem = gen_rtx_MEM (reg_mode, addr);
10909 set_mem_alias_set (mem, rs6000_sr_alias_set);
10911 RTVEC_ELT (p, i) =
10912 gen_rtx_SET (VOIDmode,
10913 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
10914 mem);
10916 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10918 else
10919 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10920 if ((regs_ever_live[info->first_gp_reg_save+i]
10921 && ! call_used_regs[info->first_gp_reg_save+i])
10922 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10923 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10924 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10926 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10927 GEN_INT (info->gp_save_offset
10928 + sp_offset
10929 + reg_size * i));
10930 rtx mem = gen_rtx_MEM (reg_mode, addr);
10932 /* Restore 64-bit quantities for SPE. */
10933 if (TARGET_SPE_ABI)
10935 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10936 rtx b;
10938 if (!SPE_CONST_OFFSET_OK (offset))
10940 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10941 emit_move_insn (b, GEN_INT (offset));
10943 else
10944 b = GEN_INT (offset);
10946 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10947 mem = gen_rtx_MEM (V2SImode, addr);
10950 set_mem_alias_set (mem, rs6000_sr_alias_set);
10952 emit_move_insn (gen_rtx_REG (reg_mode,
10953 info->first_gp_reg_save + i), mem);
10956 /* Restore fpr's if we need to do it without calling a function. */
10957 if (restoring_FPRs_inline)
10958 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10959 if ((regs_ever_live[info->first_fp_reg_save+i]
10960 && ! call_used_regs[info->first_fp_reg_save+i]))
10962 rtx addr, mem;
10963 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10964 GEN_INT (info->fp_save_offset
10965 + sp_offset
10966 + 8 * i));
10967 mem = gen_rtx_MEM (DFmode, addr);
10968 set_mem_alias_set (mem, rs6000_sr_alias_set);
10970 emit_move_insn (gen_rtx_REG (DFmode,
10971 info->first_fp_reg_save + i),
10972 mem);
10975 /* If we saved cr, restore it here. Just those that were used. */
10976 if (info->cr_save_p)
10978 rtx r12_rtx = gen_rtx_REG (SImode, 12);
10979 int count = 0;
10981 if (using_mfcr_multiple)
10983 for (i = 0; i < 8; i++)
10984 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10985 count++;
10986 if (count == 0)
10987 abort ();
10990 if (using_mfcr_multiple && count > 1)
10992 rtvec p;
10993 int ndx;
10995 p = rtvec_alloc (count);
10997 ndx = 0;
10998 for (i = 0; i < 8; i++)
10999 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11001 rtvec r = rtvec_alloc (2);
11002 RTVEC_ELT (r, 0) = r12_rtx;
11003 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11004 RTVEC_ELT (p, ndx) =
11005 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11006 gen_rtx_UNSPEC (CCmode, r, 20));
11007 ndx++;
11009 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11010 if (ndx != count)
11011 abort ();
11013 else
11014 for (i = 0; i < 8; i++)
11015 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11017 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11018 CR0_REGNO+i),
11019 r12_rtx));
11023 /* If this is V.4, unwind the stack pointer after all of the loads
11024 have been done. We need to emit a block here so that sched
11025 doesn't decide to move the sp change before the register restores
11026 (which may not have any obvious dependency on the stack). This
11027 doesn't hurt performance, because there is no scheduling that can
11028 be done after this point. */
11029 if (DEFAULT_ABI == ABI_V4)
11031 if (frame_reg_rtx != sp_reg_rtx)
11032 rs6000_emit_stack_tie ();
11034 if (use_backchain_to_restore_sp)
11036 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11038 else if (sp_offset != 0)
11040 emit_insn (TARGET_32BIT
11041 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11042 GEN_INT (sp_offset))
11043 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11044 GEN_INT (sp_offset)));
11048 if (current_function_calls_eh_return)
11050 rtx sa = EH_RETURN_STACKADJ_RTX;
11051 emit_insn (TARGET_32BIT
11052 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11053 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11056 if (!sibcall)
11058 rtvec p;
11059 if (! restoring_FPRs_inline)
11060 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11061 else
11062 p = rtvec_alloc (2);
11064 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11065 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11066 gen_rtx_REG (Pmode,
11067 LINK_REGISTER_REGNUM));
11069 /* If we have to restore more than two FP registers, branch to the
11070 restore function. It will return to our caller. */
11071 if (! restoring_FPRs_inline)
11073 int i;
11074 char rname[30];
11075 const char *alloc_rname;
11077 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11078 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11079 alloc_rname = ggc_strdup (rname);
11080 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11081 gen_rtx_SYMBOL_REF (Pmode,
11082 alloc_rname));
11084 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11086 rtx addr, mem;
11087 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11088 GEN_INT (info->fp_save_offset + 8*i));
11089 mem = gen_rtx_MEM (DFmode, addr);
11090 set_mem_alias_set (mem, rs6000_sr_alias_set);
11092 RTVEC_ELT (p, i+3) =
11093 gen_rtx_SET (VOIDmode,
11094 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11095 mem);
11099 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11103 /* Write function epilogue. */
11105 static void
11106 rs6000_output_function_epilogue (file, size)
11107 FILE *file;
11108 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11110 rs6000_stack_t *info = rs6000_stack_info ();
11112 if (! HAVE_epilogue)
11114 rtx insn = get_last_insn ();
11115 /* If the last insn was a BARRIER, we don't have to write anything except
11116 the trace table. */
11117 if (GET_CODE (insn) == NOTE)
11118 insn = prev_nonnote_insn (insn);
11119 if (insn == 0 || GET_CODE (insn) != BARRIER)
11121 /* This is slightly ugly, but at least we don't have two
11122 copies of the epilogue-emitting code. */
11123 start_sequence ();
11125 /* A NOTE_INSN_DELETED is supposed to be at the start
11126 and end of the "toplevel" insn chain. */
11127 emit_note (0, NOTE_INSN_DELETED);
11128 rs6000_emit_epilogue (FALSE);
11129 emit_note (0, NOTE_INSN_DELETED);
11131 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11133 rtx insn;
11134 unsigned addr = 0;
11135 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11137 INSN_ADDRESSES_NEW (insn, addr);
11138 addr += 4;
11142 if (TARGET_DEBUG_STACK)
11143 debug_rtx_list (get_insns (), 100);
11144 final (get_insns (), file, FALSE, FALSE);
11145 end_sequence ();
11149 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11150 on its format.
11152 We don't output a traceback table if -finhibit-size-directive was
11153 used. The documentation for -finhibit-size-directive reads
11154 ``don't output a @code{.size} assembler directive, or anything
11155 else that would cause trouble if the function is split in the
11156 middle, and the two halves are placed at locations far apart in
11157 memory.'' The traceback table has this property, since it
11158 includes the offset from the start of the function to the
11159 traceback table itself.
11161 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11162 different traceback table. */
11163 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11164 && rs6000_traceback != traceback_none)
11166 const char *fname = NULL;
11167 const char *language_string = lang_hooks.name;
11168 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11169 int i;
11170 int optional_tbtab;
11172 if (rs6000_traceback == traceback_full)
11173 optional_tbtab = 1;
11174 else if (rs6000_traceback == traceback_part)
11175 optional_tbtab = 0;
11176 else
11177 optional_tbtab = !optimize_size && !TARGET_ELF;
11179 if (optional_tbtab)
11181 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11182 while (*fname == '.') /* V.4 encodes . in the name */
11183 fname++;
11185 /* Need label immediately before tbtab, so we can compute
11186 its offset from the function start. */
11187 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11188 ASM_OUTPUT_LABEL (file, fname);
11191 /* The .tbtab pseudo-op can only be used for the first eight
11192 expressions, since it can't handle the possibly variable
11193 length fields that follow. However, if you omit the optional
11194 fields, the assembler outputs zeros for all optional fields
11195 anyways, giving each variable length field is minimum length
11196 (as defined in sys/debug.h). Thus we can not use the .tbtab
11197 pseudo-op at all. */
11199 /* An all-zero word flags the start of the tbtab, for debuggers
11200 that have to find it by searching forward from the entry
11201 point or from the current pc. */
11202 fputs ("\t.long 0\n", file);
11204 /* Tbtab format type. Use format type 0. */
11205 fputs ("\t.byte 0,", file);
11207 /* Language type. Unfortunately, there doesn't seem to be any
11208 official way to get this info, so we use language_string. C
11209 is 0. C++ is 9. No number defined for Obj-C, so use the
11210 value for C for now. There is no official value for Java,
11211 although IBM appears to be using 13. There is no official value
11212 for Chill, so we've chosen 44 pseudo-randomly. */
11213 if (! strcmp (language_string, "GNU C")
11214 || ! strcmp (language_string, "GNU Objective-C"))
11215 i = 0;
11216 else if (! strcmp (language_string, "GNU F77"))
11217 i = 1;
11218 else if (! strcmp (language_string, "GNU Ada"))
11219 i = 3;
11220 else if (! strcmp (language_string, "GNU Pascal"))
11221 i = 2;
11222 else if (! strcmp (language_string, "GNU C++"))
11223 i = 9;
11224 else if (! strcmp (language_string, "GNU Java"))
11225 i = 13;
11226 else if (! strcmp (language_string, "GNU CHILL"))
11227 i = 44;
11228 else
11229 abort ();
11230 fprintf (file, "%d,", i);
11232 /* 8 single bit fields: global linkage (not set for C extern linkage,
11233 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11234 from start of procedure stored in tbtab, internal function, function
11235 has controlled storage, function has no toc, function uses fp,
11236 function logs/aborts fp operations. */
11237 /* Assume that fp operations are used if any fp reg must be saved. */
11238 fprintf (file, "%d,",
11239 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11241 /* 6 bitfields: function is interrupt handler, name present in
11242 proc table, function calls alloca, on condition directives
11243 (controls stack walks, 3 bits), saves condition reg, saves
11244 link reg. */
11245 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11246 set up as a frame pointer, even when there is no alloca call. */
11247 fprintf (file, "%d,",
11248 ((optional_tbtab << 6)
11249 | ((optional_tbtab & frame_pointer_needed) << 5)
11250 | (info->cr_save_p << 1)
11251 | (info->lr_save_p)));
11253 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11254 (6 bits). */
11255 fprintf (file, "%d,",
11256 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11258 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11259 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11261 if (optional_tbtab)
11263 /* Compute the parameter info from the function decl argument
11264 list. */
11265 tree decl;
11266 int next_parm_info_bit = 31;
11268 for (decl = DECL_ARGUMENTS (current_function_decl);
11269 decl; decl = TREE_CHAIN (decl))
11271 rtx parameter = DECL_INCOMING_RTL (decl);
11272 enum machine_mode mode = GET_MODE (parameter);
11274 if (GET_CODE (parameter) == REG)
11276 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11278 int bits;
11280 float_parms++;
11282 if (mode == SFmode)
11283 bits = 0x2;
11284 else if (mode == DFmode || mode == TFmode)
11285 bits = 0x3;
11286 else
11287 abort ();
11289 /* If only one bit will fit, don't or in this entry. */
11290 if (next_parm_info_bit > 0)
11291 parm_info |= (bits << (next_parm_info_bit - 1));
11292 next_parm_info_bit -= 2;
11294 else
11296 fixed_parms += ((GET_MODE_SIZE (mode)
11297 + (UNITS_PER_WORD - 1))
11298 / UNITS_PER_WORD);
11299 next_parm_info_bit -= 1;
11305 /* Number of fixed point parameters. */
11306 /* This is actually the number of words of fixed point parameters; thus
11307 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11308 fprintf (file, "%d,", fixed_parms);
11310 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11311 all on stack. */
11312 /* This is actually the number of fp registers that hold parameters;
11313 and thus the maximum value is 13. */
11314 /* Set parameters on stack bit if parameters are not in their original
11315 registers, regardless of whether they are on the stack? Xlc
11316 seems to set the bit when not optimizing. */
11317 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11319 if (! optional_tbtab)
11320 return;
11322 /* Optional fields follow. Some are variable length. */
11324 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11325 11 double float. */
11326 /* There is an entry for each parameter in a register, in the order that
11327 they occur in the parameter list. Any intervening arguments on the
11328 stack are ignored. If the list overflows a long (max possible length
11329 34 bits) then completely leave off all elements that don't fit. */
11330 /* Only emit this long if there was at least one parameter. */
11331 if (fixed_parms || float_parms)
11332 fprintf (file, "\t.long %d\n", parm_info);
11334 /* Offset from start of code to tb table. */
11335 fputs ("\t.long ", file);
11336 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11337 #if TARGET_AIX
11338 RS6000_OUTPUT_BASENAME (file, fname);
11339 #else
11340 assemble_name (file, fname);
11341 #endif
11342 fputs ("-.", file);
11343 #if TARGET_AIX
11344 RS6000_OUTPUT_BASENAME (file, fname);
11345 #else
11346 assemble_name (file, fname);
11347 #endif
11348 putc ('\n', file);
11350 /* Interrupt handler mask. */
11351 /* Omit this long, since we never set the interrupt handler bit
11352 above. */
11354 /* Number of CTL (controlled storage) anchors. */
11355 /* Omit this long, since the has_ctl bit is never set above. */
11357 /* Displacement into stack of each CTL anchor. */
11358 /* Omit this list of longs, because there are no CTL anchors. */
11360 /* Length of function name. */
11361 if (*fname == '*')
11362 ++fname;
11363 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11365 /* Function name. */
11366 assemble_string (fname, strlen (fname));
11368 /* Register for alloca automatic storage; this is always reg 31.
11369 Only emit this if the alloca bit was set above. */
11370 if (frame_pointer_needed)
11371 fputs ("\t.byte 31\n", file);
11373 fputs ("\t.align 2\n", file);
11377 /* A C compound statement that outputs the assembler code for a thunk
11378 function, used to implement C++ virtual function calls with
11379 multiple inheritance. The thunk acts as a wrapper around a virtual
11380 function, adjusting the implicit object parameter before handing
11381 control off to the real function.
11383 First, emit code to add the integer DELTA to the location that
11384 contains the incoming first argument. Assume that this argument
11385 contains a pointer, and is the one used to pass the `this' pointer
11386 in C++. This is the incoming argument *before* the function
11387 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11388 values of all other incoming arguments.
11390 After the addition, emit code to jump to FUNCTION, which is a
11391 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11392 not touch the return address. Hence returning from FUNCTION will
11393 return to whoever called the current `thunk'.
11395 The effect must be as if FUNCTION had been called directly with the
11396 adjusted first argument. This macro is responsible for emitting
11397 all of the code for a thunk function; output_function_prologue()
11398 and output_function_epilogue() are not invoked.
11400 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11401 been extracted from it.) It might possibly be useful on some
11402 targets, but probably not.
11404 If you do not define this macro, the target-independent code in the
11405 C++ frontend will generate a less efficient heavyweight thunk that
11406 calls FUNCTION instead of jumping to it. The generic approach does
11407 not support varargs. */
11409 static void
11410 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
11411 FILE *file;
11412 tree thunk_fndecl ATTRIBUTE_UNUSED;
11413 HOST_WIDE_INT delta;
11414 HOST_WIDE_INT vcall_offset;
11415 tree function;
11417 rtx this, insn, funexp;
11419 reload_completed = 1;
11420 no_new_pseudos = 1;
11422 /* Mark the end of the (empty) prologue. */
11423 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
11425 /* Find the "this" pointer. If the function returns a structure,
11426 the structure return pointer is in r3. */
11427 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
11428 this = gen_rtx_REG (Pmode, 4);
11429 else
11430 this = gen_rtx_REG (Pmode, 3);
11432 /* Apply the constant offset, if required. */
11433 if (delta)
11435 rtx delta_rtx = GEN_INT (delta);
11436 emit_insn (TARGET_32BIT
11437 ? gen_addsi3 (this, this, delta_rtx)
11438 : gen_adddi3 (this, this, delta_rtx));
11441 /* Apply the offset from the vtable, if required. */
11442 if (vcall_offset)
11444 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
11445 rtx tmp = gen_rtx_REG (Pmode, 12);
11447 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
11448 emit_insn (TARGET_32BIT
11449 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
11450 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
11451 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
11452 emit_insn (TARGET_32BIT
11453 ? gen_addsi3 (this, this, tmp)
11454 : gen_adddi3 (this, this, tmp));
11457 /* Generate a tail call to the target function. */
11458 if (!TREE_USED (function))
11460 assemble_external (function);
11461 TREE_USED (function) = 1;
11463 funexp = XEXP (DECL_RTL (function), 0);
11465 SYMBOL_REF_FLAG (funexp) = 0;
11466 if (current_file_function_operand (funexp, VOIDmode)
11467 && (! lookup_attribute ("longcall",
11468 TYPE_ATTRIBUTES (TREE_TYPE (function)))
11469 || lookup_attribute ("shortcall",
11470 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
11471 SYMBOL_REF_FLAG (funexp) = 1;
11473 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
11475 #if TARGET_MACHO
11476 if (flag_pic)
11477 funexp = machopic_indirect_call_target (funexp);
11478 #endif
11480 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11481 generate sibcall RTL explicitly to avoid constraint abort. */
11482 insn = emit_call_insn (
11483 gen_rtx_PARALLEL (VOIDmode,
11484 gen_rtvec (4,
11485 gen_rtx_CALL (VOIDmode,
11486 funexp, const0_rtx),
11487 gen_rtx_USE (VOIDmode, const0_rtx),
11488 gen_rtx_USE (VOIDmode,
11489 gen_rtx_REG (SImode,
11490 LINK_REGISTER_REGNUM)),
11491 gen_rtx_RETURN (VOIDmode))));
11492 SIBLING_CALL_P (insn) = 1;
11493 emit_barrier ();
11495 /* Run just enough of rest_of_compilation to get the insns emitted.
11496 There's not really enough bulk here to make other passes such as
11497 instruction scheduling worth while. Note that use_thunk calls
11498 assemble_start_function and assemble_end_function. */
11499 insn = get_insns ();
11500 shorten_branches (insn);
11501 final_start_function (insn, file, 1);
11502 final (insn, file, 1, 0);
11503 final_end_function ();
11505 reload_completed = 0;
11506 no_new_pseudos = 0;
11509 /* A quick summary of the various types of 'constant-pool tables'
11510 under PowerPC:
11512 Target Flags Name One table per
11513 AIX (none) AIX TOC object file
11514 AIX -mfull-toc AIX TOC object file
11515 AIX -mminimal-toc AIX minimal TOC translation unit
11516 SVR4/EABI (none) SVR4 SDATA object file
11517 SVR4/EABI -fpic SVR4 pic object file
11518 SVR4/EABI -fPIC SVR4 PIC translation unit
11519 SVR4/EABI -mrelocatable EABI TOC function
11520 SVR4/EABI -maix AIX TOC object file
11521 SVR4/EABI -maix -mminimal-toc
11522 AIX minimal TOC translation unit
11524 Name Reg. Set by entries contains:
11525 made by addrs? fp? sum?
11527 AIX TOC 2 crt0 as Y option option
11528 AIX minimal TOC 30 prolog gcc Y Y option
11529 SVR4 SDATA 13 crt0 gcc N Y N
11530 SVR4 pic 30 prolog ld Y not yet N
11531 SVR4 PIC 30 prolog gcc Y option option
11532 EABI TOC 30 prolog gcc Y option option
11536 /* Hash functions for the hash table. */
11538 static unsigned
11539 rs6000_hash_constant (k)
11540 rtx k;
11542 enum rtx_code code = GET_CODE (k);
11543 enum machine_mode mode = GET_MODE (k);
11544 unsigned result = (code << 3) ^ mode;
11545 const char *format;
11546 int flen, fidx;
11548 format = GET_RTX_FORMAT (code);
11549 flen = strlen (format);
11550 fidx = 0;
11552 switch (code)
11554 case LABEL_REF:
11555 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11557 case CONST_DOUBLE:
11558 if (mode != VOIDmode)
11559 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
11560 flen = 2;
11561 break;
11563 case CODE_LABEL:
11564 fidx = 3;
11565 break;
11567 default:
11568 break;
11571 for (; fidx < flen; fidx++)
11572 switch (format[fidx])
11574 case 's':
11576 unsigned i, len;
11577 const char *str = XSTR (k, fidx);
11578 len = strlen (str);
11579 result = result * 613 + len;
11580 for (i = 0; i < len; i++)
11581 result = result * 613 + (unsigned) str[i];
11582 break;
11584 case 'u':
11585 case 'e':
11586 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11587 break;
11588 case 'i':
11589 case 'n':
11590 result = result * 613 + (unsigned) XINT (k, fidx);
11591 break;
11592 case 'w':
11593 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11594 result = result * 613 + (unsigned) XWINT (k, fidx);
11595 else
11597 size_t i;
11598 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11599 result = result * 613 + (unsigned) (XWINT (k, fidx)
11600 >> CHAR_BIT * i);
11602 break;
11603 default:
11604 abort ();
11607 return result;
11610 static unsigned
11611 toc_hash_function (hash_entry)
11612 const void * hash_entry;
11614 const struct toc_hash_struct *thc =
11615 (const struct toc_hash_struct *) hash_entry;
11616 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11619 /* Compare H1 and H2 for equivalence. */
11621 static int
11622 toc_hash_eq (h1, h2)
11623 const void * h1;
11624 const void * h2;
11626 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11627 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11629 if (((const struct toc_hash_struct *) h1)->key_mode
11630 != ((const struct toc_hash_struct *) h2)->key_mode)
11631 return 0;
11633 return rtx_equal_p (r1, r2);
11636 /* These are the names given by the C++ front-end to vtables, and
11637 vtable-like objects. Ideally, this logic should not be here;
11638 instead, there should be some programmatic way of inquiring as
11639 to whether or not an object is a vtable. */
11641 #define VTABLE_NAME_P(NAME) \
11642 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11643 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11644 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11645 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11647 void
11648 rs6000_output_symbol_ref (file, x)
11649 FILE *file;
11650 rtx x;
11652 /* Currently C++ toc references to vtables can be emitted before it
11653 is decided whether the vtable is public or private. If this is
11654 the case, then the linker will eventually complain that there is
11655 a reference to an unknown section. Thus, for vtables only,
11656 we emit the TOC reference to reference the symbol and not the
11657 section. */
11658 const char *name = XSTR (x, 0);
11660 if (VTABLE_NAME_P (name))
11662 RS6000_OUTPUT_BASENAME (file, name);
11664 else
11665 assemble_name (file, name);
11668 /* Output a TOC entry. We derive the entry name from what is being
11669 written. */
11671 void
11672 output_toc (file, x, labelno, mode)
11673 FILE *file;
11674 rtx x;
11675 int labelno;
11676 enum machine_mode mode;
11678 char buf[256];
11679 const char *name = buf;
11680 const char *real_name;
11681 rtx base = x;
11682 int offset = 0;
11684 if (TARGET_NO_TOC)
11685 abort ();
11687 /* When the linker won't eliminate them, don't output duplicate
11688 TOC entries (this happens on AIX if there is any kind of TOC,
11689 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11690 CODE_LABELs. */
11691 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
11693 struct toc_hash_struct *h;
11694 void * * found;
11696 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11697 time because GGC is not initialised at that point. */
11698 if (toc_hash_table == NULL)
11699 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
11700 toc_hash_eq, NULL);
11702 h = ggc_alloc (sizeof (*h));
11703 h->key = x;
11704 h->key_mode = mode;
11705 h->labelno = labelno;
11707 found = htab_find_slot (toc_hash_table, h, 1);
11708 if (*found == NULL)
11709 *found = h;
11710 else /* This is indeed a duplicate.
11711 Set this label equal to that label. */
11713 fputs ("\t.set ", file);
11714 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11715 fprintf (file, "%d,", labelno);
11716 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11717 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
11718 found)->labelno));
11719 return;
11723 /* If we're going to put a double constant in the TOC, make sure it's
11724 aligned properly when strict alignment is on. */
11725 if (GET_CODE (x) == CONST_DOUBLE
11726 && STRICT_ALIGNMENT
11727 && GET_MODE_BITSIZE (mode) >= 64
11728 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
11729 ASM_OUTPUT_ALIGN (file, 3);
11732 (*targetm.asm_out.internal_label) (file, "LC", labelno);
11734 /* Handle FP constants specially. Note that if we have a minimal
11735 TOC, things we put here aren't actually in the TOC, so we can allow
11736 FP constants. */
11737 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
11739 REAL_VALUE_TYPE rv;
11740 long k[4];
11742 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11743 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
11745 if (TARGET_64BIT)
11747 if (TARGET_MINIMAL_TOC)
11748 fputs (DOUBLE_INT_ASM_OP, file);
11749 else
11750 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11751 k[0] & 0xffffffff, k[1] & 0xffffffff,
11752 k[2] & 0xffffffff, k[3] & 0xffffffff);
11753 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
11754 k[0] & 0xffffffff, k[1] & 0xffffffff,
11755 k[2] & 0xffffffff, k[3] & 0xffffffff);
11756 return;
11758 else
11760 if (TARGET_MINIMAL_TOC)
11761 fputs ("\t.long ", file);
11762 else
11763 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11764 k[0] & 0xffffffff, k[1] & 0xffffffff,
11765 k[2] & 0xffffffff, k[3] & 0xffffffff);
11766 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
11767 k[0] & 0xffffffff, k[1] & 0xffffffff,
11768 k[2] & 0xffffffff, k[3] & 0xffffffff);
11769 return;
11772 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
11774 REAL_VALUE_TYPE rv;
11775 long k[2];
11777 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11778 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
11780 if (TARGET_64BIT)
11782 if (TARGET_MINIMAL_TOC)
11783 fputs (DOUBLE_INT_ASM_OP, file);
11784 else
11785 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
11786 k[0] & 0xffffffff, k[1] & 0xffffffff);
11787 fprintf (file, "0x%lx%08lx\n",
11788 k[0] & 0xffffffff, k[1] & 0xffffffff);
11789 return;
11791 else
11793 if (TARGET_MINIMAL_TOC)
11794 fputs ("\t.long ", file);
11795 else
11796 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
11797 k[0] & 0xffffffff, k[1] & 0xffffffff);
11798 fprintf (file, "0x%lx,0x%lx\n",
11799 k[0] & 0xffffffff, k[1] & 0xffffffff);
11800 return;
11803 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
11805 REAL_VALUE_TYPE rv;
11806 long l;
11808 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11809 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
11811 if (TARGET_64BIT)
11813 if (TARGET_MINIMAL_TOC)
11814 fputs (DOUBLE_INT_ASM_OP, file);
11815 else
11816 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
11817 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
11818 return;
11820 else
11822 if (TARGET_MINIMAL_TOC)
11823 fputs ("\t.long ", file);
11824 else
11825 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
11826 fprintf (file, "0x%lx\n", l & 0xffffffff);
11827 return;
11830 else if (GET_MODE (x) == VOIDmode
11831 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
11833 unsigned HOST_WIDE_INT low;
11834 HOST_WIDE_INT high;
11836 if (GET_CODE (x) == CONST_DOUBLE)
11838 low = CONST_DOUBLE_LOW (x);
11839 high = CONST_DOUBLE_HIGH (x);
11841 else
11842 #if HOST_BITS_PER_WIDE_INT == 32
11844 low = INTVAL (x);
11845 high = (low & 0x80000000) ? ~0 : 0;
11847 #else
11849 low = INTVAL (x) & 0xffffffff;
11850 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
11852 #endif
11854 /* TOC entries are always Pmode-sized, but since this
11855 is a bigendian machine then if we're putting smaller
11856 integer constants in the TOC we have to pad them.
11857 (This is still a win over putting the constants in
11858 a separate constant pool, because then we'd have
11859 to have both a TOC entry _and_ the actual constant.)
11861 For a 32-bit target, CONST_INT values are loaded and shifted
11862 entirely within `low' and can be stored in one TOC entry. */
11864 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
11865 abort ();/* It would be easy to make this work, but it doesn't now. */
11867 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
11869 #if HOST_BITS_PER_WIDE_INT == 32
11870 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
11871 POINTER_SIZE, &low, &high, 0);
11872 #else
11873 low |= high << 32;
11874 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
11875 high = (HOST_WIDE_INT) low >> 32;
11876 low &= 0xffffffff;
11877 #endif
11880 if (TARGET_64BIT)
11882 if (TARGET_MINIMAL_TOC)
11883 fputs (DOUBLE_INT_ASM_OP, file);
11884 else
11885 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
11886 (long) high & 0xffffffff, (long) low & 0xffffffff);
11887 fprintf (file, "0x%lx%08lx\n",
11888 (long) high & 0xffffffff, (long) low & 0xffffffff);
11889 return;
11891 else
11893 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
11895 if (TARGET_MINIMAL_TOC)
11896 fputs ("\t.long ", file);
11897 else
11898 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
11899 (long) high & 0xffffffff, (long) low & 0xffffffff);
11900 fprintf (file, "0x%lx,0x%lx\n",
11901 (long) high & 0xffffffff, (long) low & 0xffffffff);
11903 else
11905 if (TARGET_MINIMAL_TOC)
11906 fputs ("\t.long ", file);
11907 else
11908 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
11909 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
11911 return;
11915 if (GET_CODE (x) == CONST)
11917 if (GET_CODE (XEXP (x, 0)) != PLUS)
11918 abort ();
11920 base = XEXP (XEXP (x, 0), 0);
11921 offset = INTVAL (XEXP (XEXP (x, 0), 1));
11924 if (GET_CODE (base) == SYMBOL_REF)
11925 name = XSTR (base, 0);
11926 else if (GET_CODE (base) == LABEL_REF)
11927 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
11928 else if (GET_CODE (base) == CODE_LABEL)
11929 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
11930 else
11931 abort ();
11933 real_name = (*targetm.strip_name_encoding) (name);
11934 if (TARGET_MINIMAL_TOC)
11935 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
11936 else
11938 fprintf (file, "\t.tc %s", real_name);
11940 if (offset < 0)
11941 fprintf (file, ".N%d", - offset);
11942 else if (offset)
11943 fprintf (file, ".P%d", offset);
11945 fputs ("[TC],", file);
11948 /* Currently C++ toc references to vtables can be emitted before it
11949 is decided whether the vtable is public or private. If this is
11950 the case, then the linker will eventually complain that there is
11951 a TOC reference to an unknown section. Thus, for vtables only,
11952 we emit the TOC reference to reference the symbol and not the
11953 section. */
11954 if (VTABLE_NAME_P (name))
11956 RS6000_OUTPUT_BASENAME (file, name);
11957 if (offset < 0)
11958 fprintf (file, "%d", offset);
11959 else if (offset > 0)
11960 fprintf (file, "+%d", offset);
11962 else
11963 output_addr_const (file, x);
11964 putc ('\n', file);
11967 /* Output an assembler pseudo-op to write an ASCII string of N characters
11968 starting at P to FILE.
11970 On the RS/6000, we have to do this using the .byte operation and
11971 write out special characters outside the quoted string.
11972 Also, the assembler is broken; very long strings are truncated,
11973 so we must artificially break them up early. */
11975 void
11976 output_ascii (file, p, n)
11977 FILE *file;
11978 const char *p;
11979 int n;
11981 char c;
11982 int i, count_string;
11983 const char *for_string = "\t.byte \"";
11984 const char *for_decimal = "\t.byte ";
11985 const char *to_close = NULL;
11987 count_string = 0;
11988 for (i = 0; i < n; i++)
11990 c = *p++;
11991 if (c >= ' ' && c < 0177)
11993 if (for_string)
11994 fputs (for_string, file);
11995 putc (c, file);
11997 /* Write two quotes to get one. */
11998 if (c == '"')
12000 putc (c, file);
12001 ++count_string;
12004 for_string = NULL;
12005 for_decimal = "\"\n\t.byte ";
12006 to_close = "\"\n";
12007 ++count_string;
12009 if (count_string >= 512)
12011 fputs (to_close, file);
12013 for_string = "\t.byte \"";
12014 for_decimal = "\t.byte ";
12015 to_close = NULL;
12016 count_string = 0;
12019 else
12021 if (for_decimal)
12022 fputs (for_decimal, file);
12023 fprintf (file, "%d", c);
12025 for_string = "\n\t.byte \"";
12026 for_decimal = ", ";
12027 to_close = "\n";
12028 count_string = 0;
12032 /* Now close the string if we have written one. Then end the line. */
12033 if (to_close)
12034 fputs (to_close, file);
12037 /* Generate a unique section name for FILENAME for a section type
12038 represented by SECTION_DESC. Output goes into BUF.
12040 SECTION_DESC can be any string, as long as it is different for each
12041 possible section type.
12043 We name the section in the same manner as xlc. The name begins with an
12044 underscore followed by the filename (after stripping any leading directory
12045 names) with the last period replaced by the string SECTION_DESC. If
12046 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12047 the name. */
12049 void
12050 rs6000_gen_section_name (buf, filename, section_desc)
12051 char **buf;
12052 const char *filename;
12053 const char *section_desc;
12055 const char *q, *after_last_slash, *last_period = 0;
12056 char *p;
12057 int len;
12059 after_last_slash = filename;
12060 for (q = filename; *q; q++)
12062 if (*q == '/')
12063 after_last_slash = q + 1;
12064 else if (*q == '.')
12065 last_period = q;
12068 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12069 *buf = (char *) xmalloc (len);
12071 p = *buf;
12072 *p++ = '_';
12074 for (q = after_last_slash; *q; q++)
12076 if (q == last_period)
12078 strcpy (p, section_desc);
12079 p += strlen (section_desc);
12080 break;
12083 else if (ISALNUM (*q))
12084 *p++ = *q;
12087 if (last_period == 0)
12088 strcpy (p, section_desc);
12089 else
12090 *p = '\0';
12093 /* Emit profile function. */
12095 void
12096 output_profile_hook (labelno)
12097 int labelno ATTRIBUTE_UNUSED;
12099 if (DEFAULT_ABI == ABI_AIX)
12101 #ifdef NO_PROFILE_COUNTERS
12102 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12103 #else
12104 char buf[30];
12105 const char *label_name;
12106 rtx fun;
12108 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12109 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12110 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12112 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12113 fun, Pmode);
12114 #endif
12116 else if (DEFAULT_ABI == ABI_DARWIN)
12118 const char *mcount_name = RS6000_MCOUNT;
12119 int caller_addr_regno = LINK_REGISTER_REGNUM;
12121 /* Be conservative and always set this, at least for now. */
12122 current_function_uses_pic_offset_table = 1;
12124 #if TARGET_MACHO
12125 /* For PIC code, set up a stub and collect the caller's address
12126 from r0, which is where the prologue puts it. */
12127 if (flag_pic)
12129 mcount_name = machopic_stub_name (mcount_name);
12130 if (current_function_uses_pic_offset_table)
12131 caller_addr_regno = 0;
12133 #endif
12134 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12135 0, VOIDmode, 1,
12136 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12140 /* Write function profiler code. */
12142 void
12143 output_function_profiler (file, labelno)
12144 FILE *file;
12145 int labelno;
12147 char buf[100];
12148 int save_lr = 8;
12150 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12151 switch (DEFAULT_ABI)
12153 default:
12154 abort ();
12156 case ABI_V4:
12157 save_lr = 4;
12158 /* Fall through. */
12160 case ABI_AIX_NODESC:
12161 if (!TARGET_32BIT)
12163 warning ("no profiling of 64-bit code for this ABI");
12164 return;
12166 fprintf (file, "\tmflr %s\n", reg_names[0]);
12167 if (flag_pic == 1)
12169 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12170 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12171 reg_names[0], save_lr, reg_names[1]);
12172 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12173 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12174 assemble_name (file, buf);
12175 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12177 else if (flag_pic > 1)
12179 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12180 reg_names[0], save_lr, reg_names[1]);
12181 /* Now, we need to get the address of the label. */
12182 fputs ("\tbl 1f\n\t.long ", file);
12183 assemble_name (file, buf);
12184 fputs ("-.\n1:", file);
12185 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12186 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12187 reg_names[0], reg_names[11]);
12188 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12189 reg_names[0], reg_names[0], reg_names[11]);
12191 else
12193 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12194 assemble_name (file, buf);
12195 fputs ("@ha\n", file);
12196 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12197 reg_names[0], save_lr, reg_names[1]);
12198 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12199 assemble_name (file, buf);
12200 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12203 if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
12205 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12206 reg_names[STATIC_CHAIN_REGNUM],
12207 12, reg_names[1]);
12208 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12209 asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
12210 reg_names[STATIC_CHAIN_REGNUM],
12211 12, reg_names[1]);
12213 else
12214 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12215 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12216 break;
12218 case ABI_AIX:
12219 case ABI_DARWIN:
12220 /* Don't do anything, done in output_profile_hook (). */
12221 break;
12226 static int
12227 rs6000_use_dfa_pipeline_interface ()
12229 return 1;
12232 static int
12233 rs6000_multipass_dfa_lookahead ()
12235 if (rs6000_cpu == PROCESSOR_POWER4)
12236 return 4;
12237 else
12238 return 1;
12241 /* Power4 load update and store update instructions are cracked into a
12242 load or store and an integer insn which are executed in the same cycle.
12243 Branches have their own dispatch slot which does not count against the
12244 GCC issue rate, but it changes the program flow so there are no other
12245 instructions to issue in this cycle. */
12247 static int
12248 rs6000_variable_issue (stream, verbose, insn, more)
12249 FILE *stream ATTRIBUTE_UNUSED;
12250 int verbose ATTRIBUTE_UNUSED;
12251 rtx insn;
12252 int more;
12254 if (GET_CODE (PATTERN (insn)) == USE
12255 || GET_CODE (PATTERN (insn)) == CLOBBER)
12256 return more;
12258 if (rs6000_cpu == PROCESSOR_POWER4)
12260 enum attr_type type = get_attr_type (insn);
12261 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12262 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
12263 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX)
12264 return 0;
12265 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12266 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12267 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR)
12268 return more - 2;
12269 else
12270 return more - 1;
12272 else
12273 return more - 1;
12276 /* Adjust the cost of a scheduling dependency. Return the new cost of
12277 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12279 static int
12280 rs6000_adjust_cost (insn, link, dep_insn, cost)
12281 rtx insn;
12282 rtx link;
12283 rtx dep_insn ATTRIBUTE_UNUSED;
12284 int cost;
12286 if (! recog_memoized (insn))
12287 return 0;
12289 if (REG_NOTE_KIND (link) != 0)
12290 return 0;
12292 if (REG_NOTE_KIND (link) == 0)
12294 /* Data dependency; DEP_INSN writes a register that INSN reads
12295 some cycles later. */
12296 switch (get_attr_type (insn))
12298 case TYPE_JMPREG:
12299 /* Tell the first scheduling pass about the latency between
12300 a mtctr and bctr (and mtlr and br/blr). The first
12301 scheduling pass will not know about this latency since
12302 the mtctr instruction, which has the latency associated
12303 to it, will be generated by reload. */
12304 return TARGET_POWER ? 5 : 4;
12305 case TYPE_BRANCH:
12306 /* Leave some extra cycles between a compare and its
12307 dependent branch, to inhibit expensive mispredicts. */
12308 if ((rs6000_cpu_attr == CPU_PPC603
12309 || rs6000_cpu_attr == CPU_PPC604
12310 || rs6000_cpu_attr == CPU_PPC604E
12311 || rs6000_cpu_attr == CPU_PPC620
12312 || rs6000_cpu_attr == CPU_PPC630
12313 || rs6000_cpu_attr == CPU_PPC750
12314 || rs6000_cpu_attr == CPU_PPC7400
12315 || rs6000_cpu_attr == CPU_PPC7450
12316 || rs6000_cpu_attr == CPU_POWER4)
12317 && recog_memoized (dep_insn)
12318 && (INSN_CODE (dep_insn) >= 0)
12319 && (get_attr_type (dep_insn) == TYPE_CMP
12320 || get_attr_type (dep_insn) == TYPE_COMPARE
12321 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12322 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12323 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12324 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12325 return cost + 2;
12326 default:
12327 break;
12329 /* Fall out to return default cost. */
12332 return cost;
12335 /* A C statement (sans semicolon) to update the integer scheduling
12336 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12337 INSN earlier, increase the priority to execute INSN later. Do not
12338 define this macro if you do not need to adjust the scheduling
12339 priorities of insns. */
12341 static int
12342 rs6000_adjust_priority (insn, priority)
12343 rtx insn ATTRIBUTE_UNUSED;
12344 int priority;
12346 /* On machines (like the 750) which have asymmetric integer units,
12347 where one integer unit can do multiply and divides and the other
12348 can't, reduce the priority of multiply/divide so it is scheduled
12349 before other integer operations. */
12351 #if 0
12352 if (! INSN_P (insn))
12353 return priority;
12355 if (GET_CODE (PATTERN (insn)) == USE)
12356 return priority;
12358 switch (rs6000_cpu_attr) {
12359 case CPU_PPC750:
12360 switch (get_attr_type (insn))
12362 default:
12363 break;
12365 case TYPE_IMUL:
12366 case TYPE_IDIV:
12367 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
12368 priority, priority);
12369 if (priority >= 0 && priority < 0x01000000)
12370 priority >>= 3;
12371 break;
12374 #endif
12376 return priority;
12379 /* Return how many instructions the machine can issue per cycle. */
12381 static int
12382 rs6000_issue_rate ()
12384 switch (rs6000_cpu_attr) {
12385 case CPU_RIOS1: /* ? */
12386 case CPU_RS64A:
12387 case CPU_PPC601: /* ? */
12388 case CPU_PPC7450:
12389 return 3;
12390 case CPU_PPC440:
12391 case CPU_PPC603:
12392 case CPU_PPC750:
12393 case CPU_PPC7400:
12394 return 2;
12395 case CPU_RIOS2:
12396 case CPU_PPC604:
12397 case CPU_PPC604E:
12398 case CPU_PPC620:
12399 case CPU_PPC630:
12400 case CPU_POWER4:
12401 return 4;
12402 default:
12403 return 1;
12408 /* Length in units of the trampoline for entering a nested function. */
12411 rs6000_trampoline_size ()
12413 int ret = 0;
12415 switch (DEFAULT_ABI)
12417 default:
12418 abort ();
12420 case ABI_AIX:
12421 ret = (TARGET_32BIT) ? 12 : 24;
12422 break;
12424 case ABI_DARWIN:
12425 case ABI_V4:
12426 case ABI_AIX_NODESC:
12427 ret = (TARGET_32BIT) ? 40 : 48;
12428 break;
12431 return ret;
12434 /* Emit RTL insns to initialize the variable parts of a trampoline.
12435 FNADDR is an RTX for the address of the function's pure code.
12436 CXT is an RTX for the static chain value for the function. */
12438 void
12439 rs6000_initialize_trampoline (addr, fnaddr, cxt)
12440 rtx addr;
12441 rtx fnaddr;
12442 rtx cxt;
12444 enum machine_mode pmode = Pmode;
12445 int regsize = (TARGET_32BIT) ? 4 : 8;
12446 rtx ctx_reg = force_reg (pmode, cxt);
12448 switch (DEFAULT_ABI)
12450 default:
12451 abort ();
12453 /* Macros to shorten the code expansions below. */
12454 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12455 #define MEM_PLUS(addr,offset) \
12456 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12458 /* Under AIX, just build the 3 word function descriptor */
12459 case ABI_AIX:
12461 rtx fn_reg = gen_reg_rtx (pmode);
12462 rtx toc_reg = gen_reg_rtx (pmode);
12463 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
12464 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
12465 emit_move_insn (MEM_DEREF (addr), fn_reg);
12466 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
12467 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
12469 break;
12471 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12472 case ABI_DARWIN:
12473 case ABI_V4:
12474 case ABI_AIX_NODESC:
12475 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
12476 FALSE, VOIDmode, 4,
12477 addr, pmode,
12478 GEN_INT (rs6000_trampoline_size ()), SImode,
12479 fnaddr, pmode,
12480 ctx_reg, pmode);
12481 break;
12484 return;
12488 /* Table of valid machine attributes. */
12490 const struct attribute_spec rs6000_attribute_table[] =
12492 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12493 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12494 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12495 { NULL, 0, 0, false, false, false, NULL }
12498 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12499 struct attribute_spec.handler. */
12501 static tree
12502 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12503 tree *node;
12504 tree name;
12505 tree args ATTRIBUTE_UNUSED;
12506 int flags ATTRIBUTE_UNUSED;
12507 bool *no_add_attrs;
12509 if (TREE_CODE (*node) != FUNCTION_TYPE
12510 && TREE_CODE (*node) != FIELD_DECL
12511 && TREE_CODE (*node) != TYPE_DECL)
12513 warning ("`%s' attribute only applies to functions",
12514 IDENTIFIER_POINTER (name));
12515 *no_add_attrs = true;
12518 return NULL_TREE;
12521 /* Set longcall attributes on all functions declared when
12522 rs6000_default_long_calls is true. */
12523 static void
12524 rs6000_set_default_type_attributes (type)
12525 tree type;
12527 if (rs6000_default_long_calls
12528 && (TREE_CODE (type) == FUNCTION_TYPE
12529 || TREE_CODE (type) == METHOD_TYPE))
12530 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12531 NULL_TREE,
12532 TYPE_ATTRIBUTES (type));
12535 /* Return a reference suitable for calling a function with the
12536 longcall attribute. */
12538 struct rtx_def *
12539 rs6000_longcall_ref (call_ref)
12540 rtx call_ref;
12542 const char *call_name;
12543 tree node;
12545 if (GET_CODE (call_ref) != SYMBOL_REF)
12546 return call_ref;
12548 /* System V adds '.' to the internal name, so skip them. */
12549 call_name = XSTR (call_ref, 0);
12550 if (*call_name == '.')
12552 while (*call_name == '.')
12553 call_name++;
12555 node = get_identifier (call_name);
12556 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12559 return force_reg (Pmode, call_ref);
12563 #ifdef USING_ELFOS_H
12565 /* A C statement or statements to switch to the appropriate section
12566 for output of RTX in mode MODE. You can assume that RTX is some
12567 kind of constant in RTL. The argument MODE is redundant except in
12568 the case of a `const_int' rtx. Select the section by calling
12569 `text_section' or one of the alternatives for other sections.
12571 Do not define this macro if you put all constants in the read-only
12572 data section. */
12574 static void
12575 rs6000_elf_select_rtx_section (mode, x, align)
12576 enum machine_mode mode;
12577 rtx x;
12578 unsigned HOST_WIDE_INT align;
12580 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12581 toc_section ();
12582 else
12583 default_elf_select_rtx_section (mode, x, align);
12586 /* A C statement or statements to switch to the appropriate
12587 section for output of DECL. DECL is either a `VAR_DECL' node
12588 or a constant of some sort. RELOC indicates whether forming
12589 the initial value of DECL requires link-time relocations. */
12591 static void
12592 rs6000_elf_select_section (decl, reloc, align)
12593 tree decl;
12594 int reloc;
12595 unsigned HOST_WIDE_INT align;
12597 default_elf_select_section_1 (decl, reloc, align,
12598 flag_pic || DEFAULT_ABI == ABI_AIX);
12601 /* A C statement to build up a unique section name, expressed as a
12602 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12603 RELOC indicates whether the initial value of EXP requires
12604 link-time relocations. If you do not define this macro, GCC will use
12605 the symbol name prefixed by `.' as the section name. Note - this
12606 macro can now be called for uninitialized data items as well as
12607 initialized data and functions. */
12609 static void
12610 rs6000_elf_unique_section (decl, reloc)
12611 tree decl;
12612 int reloc;
12614 default_unique_section_1 (decl, reloc,
12615 flag_pic || DEFAULT_ABI == ABI_AIX);
12619 /* If we are referencing a function that is static or is known to be
12620 in this file, make the SYMBOL_REF special. We can use this to indicate
12621 that we can branch to this function without emitting a no-op after the
12622 call. For real AIX calling sequences, we also replace the
12623 function name with the real name (1 or 2 leading .'s), rather than
12624 the function descriptor name. This saves a lot of overriding code
12625 to read the prefixes. */
12627 static void
12628 rs6000_elf_encode_section_info (decl, first)
12629 tree decl;
12630 int first;
12632 if (!first)
12633 return;
12635 if (TREE_CODE (decl) == FUNCTION_DECL)
12637 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12638 if ((*targetm.binds_local_p) (decl))
12639 SYMBOL_REF_FLAG (sym_ref) = 1;
12641 if (DEFAULT_ABI == ABI_AIX)
12643 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
12644 size_t len2 = strlen (XSTR (sym_ref, 0));
12645 char *str = alloca (len1 + len2 + 1);
12646 str[0] = '.';
12647 str[1] = '.';
12648 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
12650 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
12653 else if (rs6000_sdata != SDATA_NONE
12654 && DEFAULT_ABI == ABI_V4
12655 && TREE_CODE (decl) == VAR_DECL)
12657 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12658 int size = int_size_in_bytes (TREE_TYPE (decl));
12659 tree section_name = DECL_SECTION_NAME (decl);
12660 const char *name = (char *)0;
12661 int len = 0;
12663 if ((*targetm.binds_local_p) (decl))
12664 SYMBOL_REF_FLAG (sym_ref) = 1;
12666 if (section_name)
12668 if (TREE_CODE (section_name) == STRING_CST)
12670 name = TREE_STRING_POINTER (section_name);
12671 len = TREE_STRING_LENGTH (section_name);
12673 else
12674 abort ();
12677 if (name
12678 ? ((len == sizeof (".sdata") - 1
12679 && strcmp (name, ".sdata") == 0)
12680 || (len == sizeof (".sdata2") - 1
12681 && strcmp (name, ".sdata2") == 0)
12682 || (len == sizeof (".sbss") - 1
12683 && strcmp (name, ".sbss") == 0)
12684 || (len == sizeof (".sbss2") - 1
12685 && strcmp (name, ".sbss2") == 0)
12686 || (len == sizeof (".PPC.EMB.sdata0") - 1
12687 && strcmp (name, ".PPC.EMB.sdata0") == 0)
12688 || (len == sizeof (".PPC.EMB.sbss0") - 1
12689 && strcmp (name, ".PPC.EMB.sbss0") == 0))
12690 : (size > 0 && size <= g_switch_value))
12692 size_t len = strlen (XSTR (sym_ref, 0));
12693 char *str = alloca (len + 2);
12695 str[0] = '@';
12696 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
12697 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
12702 static const char *
12703 rs6000_elf_strip_name_encoding (str)
12704 const char *str;
12706 while (*str == '*' || *str == '@')
12707 str++;
12708 return str;
12711 static bool
12712 rs6000_elf_in_small_data_p (decl)
12713 tree decl;
12715 if (rs6000_sdata == SDATA_NONE)
12716 return false;
12718 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
12720 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
12721 if (strcmp (section, ".sdata") == 0
12722 || strcmp (section, ".sdata2") == 0
12723 || strcmp (section, ".sbss") == 0)
12724 return true;
12726 else
12728 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
12730 if (size > 0
12731 && size <= g_switch_value
12732 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
12733 return true;
12736 return false;
12739 #endif /* USING_ELFOS_H */
12742 /* Return a REG that occurs in ADDR with coefficient 1.
12743 ADDR can be effectively incremented by incrementing REG.
12745 r0 is special and we must not select it as an address
12746 register by this routine since our caller will try to
12747 increment the returned register via an "la" instruction. */
12749 struct rtx_def *
12750 find_addr_reg (addr)
12751 rtx addr;
12753 while (GET_CODE (addr) == PLUS)
12755 if (GET_CODE (XEXP (addr, 0)) == REG
12756 && REGNO (XEXP (addr, 0)) != 0)
12757 addr = XEXP (addr, 0);
12758 else if (GET_CODE (XEXP (addr, 1)) == REG
12759 && REGNO (XEXP (addr, 1)) != 0)
12760 addr = XEXP (addr, 1);
12761 else if (CONSTANT_P (XEXP (addr, 0)))
12762 addr = XEXP (addr, 1);
12763 else if (CONSTANT_P (XEXP (addr, 1)))
12764 addr = XEXP (addr, 0);
12765 else
12766 abort ();
12768 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12769 return addr;
12770 abort ();
12773 void
12774 rs6000_fatal_bad_address (op)
12775 rtx op;
12777 fatal_insn ("bad address", op);
12780 #if TARGET_MACHO
12782 #if 0
12783 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12784 reference and a constant. */
12787 symbolic_operand (op)
12788 rtx op;
12790 switch (GET_CODE (op))
12792 case SYMBOL_REF:
12793 case LABEL_REF:
12794 return 1;
12795 case CONST:
12796 op = XEXP (op, 0);
12797 return (GET_CODE (op) == SYMBOL_REF ||
12798 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
12799 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
12800 && GET_CODE (XEXP (op, 1)) == CONST_INT);
12801 default:
12802 return 0;
12805 #endif
12807 #ifdef RS6000_LONG_BRANCH
12809 static tree stub_list = 0;
12811 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12812 procedure calls to the linked list. */
12814 void
12815 add_compiler_stub (label_name, function_name, line_number)
12816 tree label_name;
12817 tree function_name;
12818 int line_number;
12820 tree stub = build_tree_list (function_name, label_name);
12821 TREE_TYPE (stub) = build_int_2 (line_number, 0);
12822 TREE_CHAIN (stub) = stub_list;
12823 stub_list = stub;
12826 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12827 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12828 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12830 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12831 handling procedure calls from the linked list and initializes the
12832 linked list. */
12834 void
12835 output_compiler_stub ()
12837 char tmp_buf[256];
12838 char label_buf[256];
12839 tree stub;
12841 if (!flag_pic)
12842 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12844 fprintf (asm_out_file,
12845 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
12847 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12848 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12849 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
12850 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12852 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
12853 strcpy (label_buf,
12854 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
12855 else
12857 label_buf[0] = '_';
12858 strcpy (label_buf+1,
12859 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
12862 strcpy (tmp_buf, "lis r12,hi16(");
12863 strcat (tmp_buf, label_buf);
12864 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
12865 strcat (tmp_buf, label_buf);
12866 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
12867 output_asm_insn (tmp_buf, 0);
12869 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12870 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12871 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
12872 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12875 stub_list = 0;
12878 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12879 already there or not. */
12882 no_previous_def (function_name)
12883 tree function_name;
12885 tree stub;
12886 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12887 if (function_name == STUB_FUNCTION_NAME (stub))
12888 return 0;
12889 return 1;
12892 /* GET_PREV_LABEL gets the label name from the previous definition of
12893 the function. */
12895 tree
12896 get_prev_label (function_name)
12897 tree function_name;
12899 tree stub;
12900 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12901 if (function_name == STUB_FUNCTION_NAME (stub))
12902 return STUB_LABEL_NAME (stub);
12903 return 0;
12906 /* INSN is either a function call or a millicode call. It may have an
12907 unconditional jump in its delay slot.
12909 CALL_DEST is the routine we are calling. */
12911 char *
12912 output_call (insn, call_dest, operand_number)
12913 rtx insn;
12914 rtx call_dest;
12915 int operand_number;
12917 static char buf[256];
12918 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
12920 tree labelname;
12921 tree funname = get_identifier (XSTR (call_dest, 0));
12923 if (no_previous_def (funname))
12925 int line_number = 0;
12926 rtx label_rtx = gen_label_rtx ();
12927 char *label_buf, temp_buf[256];
12928 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
12929 CODE_LABEL_NUMBER (label_rtx));
12930 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
12931 labelname = get_identifier (label_buf);
12932 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
12933 if (insn)
12934 line_number = NOTE_LINE_NUMBER (insn);
12935 add_compiler_stub (labelname, funname, line_number);
12937 else
12938 labelname = get_prev_label (funname);
12940 sprintf (buf, "jbsr %%z%d,%.246s",
12941 operand_number, IDENTIFIER_POINTER (labelname));
12942 return buf;
12944 else
12946 sprintf (buf, "bl %%z%d", operand_number);
12947 return buf;
12951 #endif /* RS6000_LONG_BRANCH */
12953 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12954 do { \
12955 const char *const symbol_ = (SYMBOL); \
12956 char *buffer_ = (BUF); \
12957 if (symbol_[0] == '"') \
12959 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12961 else if (name_needs_quotes(symbol_)) \
12963 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12965 else \
12967 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12969 } while (0)
12972 /* Generate PIC and indirect symbol stubs. */
12974 void
12975 machopic_output_stub (file, symb, stub)
12976 FILE *file;
12977 const char *symb, *stub;
12979 unsigned int length;
12980 char *symbol_name, *lazy_ptr_name;
12981 char *local_label_0;
12982 static int label = 0;
12984 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12985 symb = (*targetm.strip_name_encoding) (symb);
12987 label += 1;
12989 length = strlen (symb);
12990 symbol_name = alloca (length + 32);
12991 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
12993 lazy_ptr_name = alloca (length + 32);
12994 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
12996 local_label_0 = alloca (length + 32);
12997 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
12999 if (flag_pic == 2)
13000 machopic_picsymbol_stub1_section ();
13001 else
13002 machopic_symbol_stub1_section ();
13003 fprintf (file, "\t.align 2\n");
13005 fprintf (file, "%s:\n", stub);
13006 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13008 if (flag_pic == 2)
13010 fprintf (file, "\tmflr r0\n");
13011 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13012 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13013 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13014 lazy_ptr_name, local_label_0);
13015 fprintf (file, "\tmtlr r0\n");
13016 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13017 lazy_ptr_name, local_label_0);
13018 fprintf (file, "\tmtctr r12\n");
13019 fprintf (file, "\tbctr\n");
13021 else
13022 fprintf (file, "non-pure not supported\n");
13024 machopic_lazy_symbol_ptr_section ();
13025 fprintf (file, "%s:\n", lazy_ptr_name);
13026 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13027 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13030 /* Legitimize PIC addresses. If the address is already
13031 position-independent, we return ORIG. Newly generated
13032 position-independent addresses go into a reg. This is REG if non
13033 zero, otherwise we allocate register(s) as necessary. */
13035 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13038 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13039 rtx orig;
13040 enum machine_mode mode;
13041 rtx reg;
13043 rtx base, offset;
13045 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13046 reg = gen_reg_rtx (Pmode);
13048 if (GET_CODE (orig) == CONST)
13050 if (GET_CODE (XEXP (orig, 0)) == PLUS
13051 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13052 return orig;
13054 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13056 base =
13057 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13058 Pmode, reg);
13059 offset =
13060 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13061 Pmode, reg);
13063 else
13064 abort ();
13066 if (GET_CODE (offset) == CONST_INT)
13068 if (SMALL_INT (offset))
13069 return plus_constant (base, INTVAL (offset));
13070 else if (! reload_in_progress && ! reload_completed)
13071 offset = force_reg (Pmode, offset);
13072 else
13074 rtx mem = force_const_mem (Pmode, orig);
13075 return machopic_legitimize_pic_address (mem, Pmode, reg);
13078 return gen_rtx (PLUS, Pmode, base, offset);
13081 /* Fall back on generic machopic code. */
13082 return machopic_legitimize_pic_address (orig, mode, reg);
13085 /* This is just a placeholder to make linking work without having to
13086 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13087 ever needed for Darwin (not too likely!) this would have to get a
13088 real definition. */
13090 void
13091 toc_section ()
13095 #endif /* TARGET_MACHO */
13097 #if TARGET_ELF
13098 static unsigned int
13099 rs6000_elf_section_type_flags (decl, name, reloc)
13100 tree decl;
13101 const char *name;
13102 int reloc;
13104 unsigned int flags
13105 = default_section_type_flags_1 (decl, name, reloc,
13106 flag_pic || DEFAULT_ABI == ABI_AIX);
13108 if (TARGET_RELOCATABLE)
13109 flags |= SECTION_WRITE;
13111 return flags;
13114 /* Record an element in the table of global constructors. SYMBOL is
13115 a SYMBOL_REF of the function to be called; PRIORITY is a number
13116 between 0 and MAX_INIT_PRIORITY.
13118 This differs from default_named_section_asm_out_constructor in
13119 that we have special handling for -mrelocatable. */
13121 static void
13122 rs6000_elf_asm_out_constructor (symbol, priority)
13123 rtx symbol;
13124 int priority;
13126 const char *section = ".ctors";
13127 char buf[16];
13129 if (priority != DEFAULT_INIT_PRIORITY)
13131 sprintf (buf, ".ctors.%.5u",
13132 /* Invert the numbering so the linker puts us in the proper
13133 order; constructors are run from right to left, and the
13134 linker sorts in increasing order. */
13135 MAX_INIT_PRIORITY - priority);
13136 section = buf;
13139 named_section_flags (section, SECTION_WRITE);
13140 assemble_align (POINTER_SIZE);
13142 if (TARGET_RELOCATABLE)
13144 fputs ("\t.long (", asm_out_file);
13145 output_addr_const (asm_out_file, symbol);
13146 fputs (")@fixup\n", asm_out_file);
13148 else
13149 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13152 static void
13153 rs6000_elf_asm_out_destructor (symbol, priority)
13154 rtx symbol;
13155 int priority;
13157 const char *section = ".dtors";
13158 char buf[16];
13160 if (priority != DEFAULT_INIT_PRIORITY)
13162 sprintf (buf, ".dtors.%.5u",
13163 /* Invert the numbering so the linker puts us in the proper
13164 order; constructors are run from right to left, and the
13165 linker sorts in increasing order. */
13166 MAX_INIT_PRIORITY - priority);
13167 section = buf;
13170 named_section_flags (section, SECTION_WRITE);
13171 assemble_align (POINTER_SIZE);
13173 if (TARGET_RELOCATABLE)
13175 fputs ("\t.long (", asm_out_file);
13176 output_addr_const (asm_out_file, symbol);
13177 fputs (")@fixup\n", asm_out_file);
13179 else
13180 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13182 #endif
13184 #if TARGET_XCOFF
13185 static void
13186 rs6000_xcoff_asm_globalize_label (stream, name)
13187 FILE *stream;
13188 const char *name;
13190 fputs (GLOBAL_ASM_OP, stream);
13191 RS6000_OUTPUT_BASENAME (stream, name);
13192 putc ('\n', stream);
13195 static void
13196 rs6000_xcoff_asm_named_section (name, flags)
13197 const char *name;
13198 unsigned int flags;
13200 int smclass;
13201 static const char * const suffix[3] = { "PR", "RO", "RW" };
13203 if (flags & SECTION_CODE)
13204 smclass = 0;
13205 else if (flags & SECTION_WRITE)
13206 smclass = 2;
13207 else
13208 smclass = 1;
13210 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13211 (flags & SECTION_CODE) ? "." : "",
13212 name, suffix[smclass], flags & SECTION_ENTSIZE);
13215 static void
13216 rs6000_xcoff_select_section (decl, reloc, align)
13217 tree decl;
13218 int reloc;
13219 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13221 if (decl_readonly_section_1 (decl, reloc, 1))
13223 if (TREE_PUBLIC (decl))
13224 read_only_data_section ();
13225 else
13226 read_only_private_data_section ();
13228 else
13230 if (TREE_PUBLIC (decl))
13231 data_section ();
13232 else
13233 private_data_section ();
13237 static void
13238 rs6000_xcoff_unique_section (decl, reloc)
13239 tree decl;
13240 int reloc ATTRIBUTE_UNUSED;
13242 const char *name;
13244 /* Use select_section for private and uninitialized data. */
13245 if (!TREE_PUBLIC (decl)
13246 || DECL_COMMON (decl)
13247 || DECL_INITIAL (decl) == NULL_TREE
13248 || DECL_INITIAL (decl) == error_mark_node
13249 || (flag_zero_initialized_in_bss
13250 && initializer_zerop (DECL_INITIAL (decl))))
13251 return;
13253 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13254 name = (*targetm.strip_name_encoding) (name);
13255 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13258 /* Select section for constant in constant pool.
13260 On RS/6000, all constants are in the private read-only data area.
13261 However, if this is being placed in the TOC it must be output as a
13262 toc entry. */
13264 static void
13265 rs6000_xcoff_select_rtx_section (mode, x, align)
13266 enum machine_mode mode;
13267 rtx x;
13268 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13270 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13271 toc_section ();
13272 else
13273 read_only_private_data_section ();
13276 /* Remove any trailing [DS] or the like from the symbol name. */
13278 static const char *
13279 rs6000_xcoff_strip_name_encoding (name)
13280 const char *name;
13282 size_t len;
13283 if (*name == '*')
13284 name++;
13285 len = strlen (name);
13286 if (name[len - 1] == ']')
13287 return ggc_alloc_string (name, len - 4);
13288 else
13289 return name;
13292 /* Section attributes. AIX is always PIC. */
13294 static unsigned int
13295 rs6000_xcoff_section_type_flags (decl, name, reloc)
13296 tree decl;
13297 const char *name;
13298 int reloc;
13300 unsigned int align;
13301 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13303 /* Align to at least UNIT size. */
13304 if (flags & SECTION_CODE)
13305 align = MIN_UNITS_PER_WORD;
13306 else
13307 /* Increase alignment of large objects if not already stricter. */
13308 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13309 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13310 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13312 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13315 #endif /* TARGET_XCOFF */
13317 /* Note that this is also used for PPC64 Linux. */
13319 static void
13320 rs6000_xcoff_encode_section_info (decl, first)
13321 tree decl;
13322 int first ATTRIBUTE_UNUSED;
13324 if (TREE_CODE (decl) == FUNCTION_DECL
13325 && (*targetm.binds_local_p) (decl))
13326 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13329 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13330 PIC, use private copy of flag_pic. */
13332 static bool
13333 rs6000_binds_local_p (decl)
13334 tree decl;
13336 return default_binds_local_p_1 (decl, flag_pic || rs6000_flag_pic);
13339 /* Compute a (partial) cost for rtx X. Return true if the complete
13340 cost has been computed, and false if subexpressions should be
13341 scanned. In either case, *TOTAL contains the cost result. */
13343 static bool
13344 rs6000_rtx_costs (x, code, outer_code, total)
13345 rtx x;
13346 int code, outer_code ATTRIBUTE_UNUSED;
13347 int *total;
13349 switch (code)
13351 /* On the RS/6000, if it is valid in the insn, it is free.
13352 So this always returns 0. */
13353 case CONST_INT:
13354 case CONST:
13355 case LABEL_REF:
13356 case SYMBOL_REF:
13357 case CONST_DOUBLE:
13358 case HIGH:
13359 *total = 0;
13360 return true;
13362 case PLUS:
13363 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13364 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
13365 + 0x8000) >= 0x10000)
13366 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13367 ? COSTS_N_INSNS (2)
13368 : COSTS_N_INSNS (1));
13369 return true;
13371 case AND:
13372 case IOR:
13373 case XOR:
13374 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13375 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
13376 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13377 ? COSTS_N_INSNS (2)
13378 : COSTS_N_INSNS (1));
13379 return true;
13381 case MULT:
13382 if (optimize_size)
13384 *total = COSTS_N_INSNS (2);
13385 return true;
13387 switch (rs6000_cpu)
13389 case PROCESSOR_RIOS1:
13390 case PROCESSOR_PPC405:
13391 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13392 ? COSTS_N_INSNS (5)
13393 : (INTVAL (XEXP (x, 1)) >= -256
13394 && INTVAL (XEXP (x, 1)) <= 255)
13395 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13396 return true;
13398 case PROCESSOR_RS64A:
13399 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13400 ? GET_MODE (XEXP (x, 1)) != DImode
13401 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13402 : (INTVAL (XEXP (x, 1)) >= -256
13403 && INTVAL (XEXP (x, 1)) <= 255)
13404 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13405 return true;
13407 case PROCESSOR_RIOS2:
13408 case PROCESSOR_MPCCORE:
13409 case PROCESSOR_PPC604e:
13410 *total = COSTS_N_INSNS (2);
13411 return true;
13413 case PROCESSOR_PPC601:
13414 *total = COSTS_N_INSNS (5);
13415 return true;
13417 case PROCESSOR_PPC603:
13418 case PROCESSOR_PPC7400:
13419 case PROCESSOR_PPC750:
13420 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13421 ? COSTS_N_INSNS (5)
13422 : (INTVAL (XEXP (x, 1)) >= -256
13423 && INTVAL (XEXP (x, 1)) <= 255)
13424 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13425 return true;
13427 case PROCESSOR_PPC7450:
13428 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13429 ? COSTS_N_INSNS (4)
13430 : COSTS_N_INSNS (3));
13431 return true;
13433 case PROCESSOR_PPC403:
13434 case PROCESSOR_PPC604:
13435 case PROCESSOR_PPC8540:
13436 *total = COSTS_N_INSNS (4);
13437 return true;
13439 case PROCESSOR_PPC620:
13440 case PROCESSOR_PPC630:
13441 case PROCESSOR_POWER4:
13442 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13443 ? GET_MODE (XEXP (x, 1)) != DImode
13444 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13445 : (INTVAL (XEXP (x, 1)) >= -256
13446 && INTVAL (XEXP (x, 1)) <= 255)
13447 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13448 return true;
13450 default:
13451 abort ();
13454 case DIV:
13455 case MOD:
13456 if (GET_CODE (XEXP (x, 1)) == CONST_INT
13457 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
13459 *total = COSTS_N_INSNS (2);
13460 return true;
13462 /* FALLTHRU */
13464 case UDIV:
13465 case UMOD:
13466 switch (rs6000_cpu)
13468 case PROCESSOR_RIOS1:
13469 *total = COSTS_N_INSNS (19);
13470 return true;
13472 case PROCESSOR_RIOS2:
13473 *total = COSTS_N_INSNS (13);
13474 return true;
13476 case PROCESSOR_RS64A:
13477 *total = (GET_MODE (XEXP (x, 1)) != DImode
13478 ? COSTS_N_INSNS (65)
13479 : COSTS_N_INSNS (67));
13480 return true;
13482 case PROCESSOR_MPCCORE:
13483 *total = COSTS_N_INSNS (6);
13484 return true;
13486 case PROCESSOR_PPC403:
13487 *total = COSTS_N_INSNS (33);
13488 return true;
13490 case PROCESSOR_PPC405:
13491 *total = COSTS_N_INSNS (35);
13492 return true;
13494 case PROCESSOR_PPC601:
13495 *total = COSTS_N_INSNS (36);
13496 return true;
13498 case PROCESSOR_PPC603:
13499 *total = COSTS_N_INSNS (37);
13500 return true;
13502 case PROCESSOR_PPC604:
13503 case PROCESSOR_PPC604e:
13504 *total = COSTS_N_INSNS (20);
13505 return true;
13507 case PROCESSOR_PPC620:
13508 case PROCESSOR_PPC630:
13509 case PROCESSOR_POWER4:
13510 *total = (GET_MODE (XEXP (x, 1)) != DImode
13511 ? COSTS_N_INSNS (21)
13512 : COSTS_N_INSNS (37));
13513 return true;
13515 case PROCESSOR_PPC750:
13516 case PROCESSOR_PPC8540:
13517 case PROCESSOR_PPC7400:
13518 *total = COSTS_N_INSNS (19);
13519 return true;
13521 case PROCESSOR_PPC7450:
13522 *total = COSTS_N_INSNS (23);
13523 return true;
13525 default:
13526 abort ();
13529 case FFS:
13530 *total = COSTS_N_INSNS (4);
13531 return true;
13533 case MEM:
13534 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13535 *total = 5;
13536 return true;
13538 default:
13539 return false;
13543 /* A C expression returning the cost of moving data from a register of class
13544 CLASS1 to one of CLASS2. */
13547 rs6000_register_move_cost (mode, from, to)
13548 enum machine_mode mode;
13549 enum reg_class from, to;
13551 /* Moves from/to GENERAL_REGS. */
13552 if (reg_classes_intersect_p (to, GENERAL_REGS)
13553 || reg_classes_intersect_p (from, GENERAL_REGS))
13555 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13556 from = to;
13558 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13559 return (rs6000_memory_move_cost (mode, from, 0)
13560 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13562 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13563 else if (from == CR_REGS)
13564 return 4;
13566 else
13567 /* A move will cost one instruction per GPR moved. */
13568 return 2 * HARD_REGNO_NREGS (0, mode);
13571 /* Moving between two similar registers is just one instruction. */
13572 else if (reg_classes_intersect_p (to, from))
13573 return mode == TFmode ? 4 : 2;
13575 /* Everything else has to go through GENERAL_REGS. */
13576 else
13577 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13578 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13581 /* A C expressions returning the cost of moving data of MODE from a register to
13582 or from memory. */
13585 rs6000_memory_move_cost (mode, class, in)
13586 enum machine_mode mode;
13587 enum reg_class class;
13588 int in ATTRIBUTE_UNUSED;
13590 if (reg_classes_intersect_p (class, GENERAL_REGS))
13591 return 4 * HARD_REGNO_NREGS (0, mode);
13592 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13593 return 4 * HARD_REGNO_NREGS (32, mode);
13594 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13595 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13596 else
13597 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13600 /* Return true if TYPE is of type __ev64_opaque__. */
13602 static int
13603 is_ev64_opaque_type (type)
13604 tree type;
13606 return (TYPE_NAME (type)
13607 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13608 && DECL_NAME (TYPE_NAME (type))
13609 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13610 "__ev64_opaque__") == 0);
13613 /* Return true if vector type1 can be converted into vector type2. */
13615 static bool
13616 rs6000_spe_vector_types_compatible (t1, t2)
13617 tree t1;
13618 tree t2;
13620 if (!TARGET_SPE
13621 || TREE_CODE (t1) != VECTOR_TYPE || TREE_CODE (t2) != VECTOR_TYPE)
13622 return 0;
13624 if (TYPE_NAME (t1) || TYPE_NAME (t2))
13625 return is_ev64_opaque_type (t1) || is_ev64_opaque_type (t2);
13627 /* FIXME: We assume V2SI is the opaque type, so we accidentally
13628 allow inter conversion to and from V2SI modes. We could use
13629 V1D1, and rewrite <spe.h> accordingly. */
13630 return t1 == V2SI_type_node || t2 == V2SI_type_node;
13633 #include "gt-rs6000.h"