2003-05-19 Aldy Hernandez <aldyh@redhat.com>
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blobf851e95af6ab72a8fb1e3727873dd6a5273230d1
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
56 #endif
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
62 && !((n) & 1) \
63 && easy_vector_same (x, y))
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
68 /* Target cpu type */
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
92 /* Nonzero if we want SPE ABI extensions. */
93 int rs6000_spe_abi;
95 /* Whether isel instructions should be generated. */
96 int rs6000_isel;
98 /* Whether SPE simd instructions should be generated. */
99 int rs6000_spe;
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs = 0;
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string;
107 /* String from -misel=. */
108 const char *rs6000_isel_string;
110 /* String from -mspe=. */
111 const char *rs6000_spe_string;
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined;
116 /* Save information from a "cmpxx" operation until the branch or scc is
117 emitted. */
118 rtx rs6000_compare_op0, rs6000_compare_op1;
119 int rs6000_compare_fp_p;
121 /* Label number of label created for -mrelocatable, to call to so we can
122 get the address of the GOT section */
123 int rs6000_pic_labelno;
125 #ifdef USING_ELFOS_H
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name = RS6000_ABI_NAME;
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
132 /* Which small data model to use */
133 const char *rs6000_sdata_name = (char *)0;
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno = 0;
137 #endif
139 /* Bit size of immediate TLS offsets and string from which it is decoded. */
140 int rs6000_tls_size = 32;
141 const char *rs6000_tls_size_string;
143 /* ABI enumeration available for subtarget to use. */
144 enum rs6000_abi rs6000_current_abi;
146 /* ABI string from -mabi= option. */
147 const char *rs6000_abi_string;
149 /* Debug flags */
150 const char *rs6000_debug_name;
151 int rs6000_debug_stack; /* debug stack applications */
152 int rs6000_debug_arg; /* debug argument handling */
154 /* Opaque types. */
155 static GTY(()) tree opaque_V2SI_type_node;
156 static GTY(()) tree opaque_V2SF_type_node;
157 static GTY(()) tree opaque_p_V2SI_type_node;
159 const char *rs6000_traceback_name;
160 static enum {
161 traceback_default = 0,
162 traceback_none,
163 traceback_part,
164 traceback_full
165 } rs6000_traceback;
167 /* Flag to say the TOC is initialized */
168 int toc_initialized;
169 char toc_label_name[10];
171 /* Alias set for saves and restores from the rs6000 stack. */
172 static int rs6000_sr_alias_set;
174 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
175 The only place that looks at this is rs6000_set_default_type_attributes;
176 everywhere else should rely on the presence or absence of a longcall
177 attribute on the function declaration. */
178 int rs6000_default_long_calls;
179 const char *rs6000_longcall_switch;
181 /* Control alignment for fields within structures. */
182 /* String from -malign-XXXXX. */
183 const char *rs6000_alignment_string;
184 int rs6000_alignment_flags;
186 struct builtin_description
188 /* mask is not const because we're going to alter it below. This
189 nonsense will go away when we rewrite the -march infrastructure
190 to give us more target flag bits. */
191 unsigned int mask;
192 const enum insn_code icode;
193 const char *const name;
194 const enum rs6000_builtins code;
197 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
198 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
199 static void validate_condition_mode
200 PARAMS ((enum rtx_code, enum machine_mode));
201 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
202 static void rs6000_maybe_dead PARAMS ((rtx));
203 static void rs6000_emit_stack_tie PARAMS ((void));
204 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
205 static rtx spe_synthesize_frame_save PARAMS ((rtx));
206 static bool spe_func_has_64bit_regs_p PARAMS ((void));
207 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
208 unsigned int, int, int));
209 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
210 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
211 static unsigned rs6000_hash_constant PARAMS ((rtx));
212 static unsigned toc_hash_function PARAMS ((const void *));
213 static int toc_hash_eq PARAMS ((const void *, const void *));
214 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
215 static bool constant_pool_expr_p PARAMS ((rtx));
216 static bool toc_relative_expr_p PARAMS ((rtx));
217 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
218 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
219 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
220 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
221 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
222 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
223 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
224 #ifdef HAVE_GAS_HIDDEN
225 static void rs6000_assemble_visibility PARAMS ((tree, int));
226 #endif
227 static int rs6000_ra_ever_killed PARAMS ((void));
228 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
229 extern const struct attribute_spec rs6000_attribute_table[];
230 static void rs6000_set_default_type_attributes PARAMS ((tree));
231 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
232 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
233 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
234 HOST_WIDE_INT, tree));
235 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
236 HOST_WIDE_INT, HOST_WIDE_INT));
237 #if TARGET_ELF
238 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
239 int));
240 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
241 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
242 static void rs6000_elf_select_section PARAMS ((tree, int,
243 unsigned HOST_WIDE_INT));
244 static void rs6000_elf_unique_section PARAMS ((tree, int));
245 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
246 unsigned HOST_WIDE_INT));
247 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
248 ATTRIBUTE_UNUSED;
249 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
250 #endif
251 #if TARGET_XCOFF
252 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
253 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
254 static void rs6000_xcoff_select_section PARAMS ((tree, int,
255 unsigned HOST_WIDE_INT));
256 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
257 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
258 unsigned HOST_WIDE_INT));
259 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
260 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
261 #endif
262 #if TARGET_MACHO
263 static bool rs6000_binds_local_p PARAMS ((tree));
264 #endif
265 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
266 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
267 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
268 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
269 static int rs6000_adjust_priority PARAMS ((rtx, int));
270 static int rs6000_issue_rate PARAMS ((void));
271 static int rs6000_use_sched_lookahead PARAMS ((void));
273 static void rs6000_init_builtins PARAMS ((void));
274 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
275 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
276 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
277 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
278 static void altivec_init_builtins PARAMS ((void));
279 static void rs6000_common_init_builtins PARAMS ((void));
281 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
282 int, enum rs6000_builtins,
283 enum rs6000_builtins));
284 static void spe_init_builtins PARAMS ((void));
285 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
286 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
287 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
288 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
290 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
291 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
292 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
293 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
294 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
295 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
296 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
297 static void rs6000_parse_abi_options PARAMS ((void));
298 static void rs6000_parse_alignment_option PARAMS ((void));
299 static void rs6000_parse_tls_size_option PARAMS ((void));
300 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
301 static int first_altivec_reg_to_save PARAMS ((void));
302 static unsigned int compute_vrsave_mask PARAMS ((void));
303 static void is_altivec_return_reg PARAMS ((rtx, void *));
304 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
305 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
306 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
307 static bool is_ev64_opaque_type PARAMS ((tree));
308 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
309 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
310 static rtx rs6000_tls_get_addr PARAMS ((void));
311 static rtx rs6000_got_sym PARAMS ((void));
312 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
313 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
314 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
316 /* Hash table stuff for keeping track of TOC entries. */
318 struct toc_hash_struct GTY(())
320 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
321 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
322 rtx key;
323 enum machine_mode key_mode;
324 int labelno;
327 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
329 /* Default register names. */
330 char rs6000_reg_names[][8] =
332 "0", "1", "2", "3", "4", "5", "6", "7",
333 "8", "9", "10", "11", "12", "13", "14", "15",
334 "16", "17", "18", "19", "20", "21", "22", "23",
335 "24", "25", "26", "27", "28", "29", "30", "31",
336 "0", "1", "2", "3", "4", "5", "6", "7",
337 "8", "9", "10", "11", "12", "13", "14", "15",
338 "16", "17", "18", "19", "20", "21", "22", "23",
339 "24", "25", "26", "27", "28", "29", "30", "31",
340 "mq", "lr", "ctr","ap",
341 "0", "1", "2", "3", "4", "5", "6", "7",
342 "xer",
343 /* AltiVec registers. */
344 "0", "1", "2", "3", "4", "5", "6", "7",
345 "8", "9", "10", "11", "12", "13", "14", "15",
346 "16", "17", "18", "19", "20", "21", "22", "23",
347 "24", "25", "26", "27", "28", "29", "30", "31",
348 "vrsave", "vscr",
349 /* SPE registers. */
350 "spe_acc", "spefscr"
353 #ifdef TARGET_REGNAMES
354 static const char alt_reg_names[][8] =
356 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
357 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
358 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
359 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
360 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
361 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
362 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
363 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
364 "mq", "lr", "ctr", "ap",
365 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
366 "xer",
367 /* AltiVec registers. */
368 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
369 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
370 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
371 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
372 "vrsave", "vscr",
373 /* SPE registers. */
374 "spe_acc", "spefscr"
376 #endif
378 #ifndef MASK_STRICT_ALIGN
379 #define MASK_STRICT_ALIGN 0
380 #endif
381 #ifndef TARGET_PROFILE_KERNEL
382 #define TARGET_PROFILE_KERNEL 0
383 #endif
385 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
386 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
388 /* Return 1 for a symbol ref for a thread-local storage symbol. */
389 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
390 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
392 /* Initialize the GCC target structure. */
393 #undef TARGET_ATTRIBUTE_TABLE
394 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
395 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
396 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
398 #undef TARGET_ASM_ALIGNED_DI_OP
399 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
401 /* Default unaligned ops are only provided for ELF. Find the ops needed
402 for non-ELF systems. */
403 #ifndef OBJECT_FORMAT_ELF
404 #if TARGET_XCOFF
405 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
406 64-bit targets. */
407 #undef TARGET_ASM_UNALIGNED_HI_OP
408 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
409 #undef TARGET_ASM_UNALIGNED_SI_OP
410 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
411 #undef TARGET_ASM_UNALIGNED_DI_OP
412 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
413 #else
414 /* For Darwin. */
415 #undef TARGET_ASM_UNALIGNED_HI_OP
416 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
417 #undef TARGET_ASM_UNALIGNED_SI_OP
418 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
419 #endif
420 #endif
422 /* This hook deals with fixups for relocatable code and DI-mode objects
423 in 64-bit code. */
424 #undef TARGET_ASM_INTEGER
425 #define TARGET_ASM_INTEGER rs6000_assemble_integer
427 #ifdef HAVE_GAS_HIDDEN
428 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
429 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
430 #endif
432 #undef TARGET_HAVE_TLS
433 #define TARGET_HAVE_TLS HAVE_AS_TLS
435 #undef TARGET_CANNOT_FORCE_CONST_MEM
436 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
438 #undef TARGET_ASM_FUNCTION_PROLOGUE
439 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
440 #undef TARGET_ASM_FUNCTION_EPILOGUE
441 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
443 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
444 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
445 #undef TARGET_SCHED_VARIABLE_ISSUE
446 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
448 #undef TARGET_SCHED_ISSUE_RATE
449 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
450 #undef TARGET_SCHED_ADJUST_COST
451 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
452 #undef TARGET_SCHED_ADJUST_PRIORITY
453 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
455 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
456 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
458 #undef TARGET_INIT_BUILTINS
459 #define TARGET_INIT_BUILTINS rs6000_init_builtins
461 #undef TARGET_EXPAND_BUILTIN
462 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
464 #if TARGET_MACHO
465 #undef TARGET_BINDS_LOCAL_P
466 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
467 #endif
469 #undef TARGET_ASM_OUTPUT_MI_THUNK
470 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
472 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
473 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
475 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
476 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
478 #undef TARGET_RTX_COSTS
479 #define TARGET_RTX_COSTS rs6000_rtx_costs
480 #undef TARGET_ADDRESS_COST
481 #define TARGET_ADDRESS_COST hook_int_rtx_0
483 #undef TARGET_VECTOR_OPAQUE_P
484 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
486 #undef TARGET_DWARF_REGISTER_SPAN
487 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
489 struct gcc_target targetm = TARGET_INITIALIZER;
491 /* Override command line options. Mostly we process the processor
492 type and sometimes adjust other TARGET_ options. */
494 void
495 rs6000_override_options (default_cpu)
496 const char *default_cpu;
498 size_t i, j;
499 struct rs6000_cpu_select *ptr;
501 /* Simplify the entries below by making a mask for any POWER
502 variant and any PowerPC variant. */
504 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
505 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
506 | MASK_PPC_GFXOPT | MASK_POWERPC64)
507 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
509 static struct ptt
511 const char *const name; /* Canonical processor name. */
512 const enum processor_type processor; /* Processor type enum value. */
513 const int target_enable; /* Target flags to enable. */
514 const int target_disable; /* Target flags to disable. */
515 } const processor_target_table[]
516 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
517 POWER_MASKS | POWERPC_MASKS},
518 {"power", PROCESSOR_POWER,
519 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
520 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
521 {"power2", PROCESSOR_POWER,
522 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
523 POWERPC_MASKS | MASK_NEW_MNEMONICS},
524 {"power3", PROCESSOR_PPC630,
525 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
526 POWER_MASKS},
527 {"power4", PROCESSOR_POWER4,
528 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
529 POWER_MASKS},
530 {"powerpc", PROCESSOR_POWERPC,
531 MASK_POWERPC | MASK_NEW_MNEMONICS,
532 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
533 {"powerpc64", PROCESSOR_POWERPC64,
534 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
535 POWER_MASKS | POWERPC_OPT_MASKS},
536 {"rios", PROCESSOR_RIOS1,
537 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
538 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
539 {"rios1", PROCESSOR_RIOS1,
540 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
541 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
542 {"rsc", PROCESSOR_PPC601,
543 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
544 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
545 {"rsc1", PROCESSOR_PPC601,
546 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
547 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
548 {"rios2", PROCESSOR_RIOS2,
549 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
550 POWERPC_MASKS | MASK_NEW_MNEMONICS},
551 {"rs64a", PROCESSOR_RS64A,
552 MASK_POWERPC | MASK_NEW_MNEMONICS,
553 POWER_MASKS | POWERPC_OPT_MASKS},
554 {"401", PROCESSOR_PPC403,
555 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
556 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
557 {"403", PROCESSOR_PPC403,
558 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
559 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
560 {"405", PROCESSOR_PPC405,
561 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
562 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
563 {"405fp", PROCESSOR_PPC405,
564 MASK_POWERPC | MASK_NEW_MNEMONICS,
565 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
566 {"440", PROCESSOR_PPC440,
567 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
568 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
569 {"440fp", PROCESSOR_PPC440,
570 MASK_POWERPC | MASK_NEW_MNEMONICS,
571 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
572 {"505", PROCESSOR_MPCCORE,
573 MASK_POWERPC | MASK_NEW_MNEMONICS,
574 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
575 {"601", PROCESSOR_PPC601,
576 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
577 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
578 {"602", PROCESSOR_PPC603,
579 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
580 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
581 {"603", PROCESSOR_PPC603,
582 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
583 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
584 {"603e", PROCESSOR_PPC603,
585 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
586 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
587 {"ec603e", PROCESSOR_PPC603,
588 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
590 {"604", PROCESSOR_PPC604,
591 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
592 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
593 {"604e", PROCESSOR_PPC604e,
594 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
595 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
596 {"620", PROCESSOR_PPC620,
597 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
598 POWER_MASKS},
599 {"630", PROCESSOR_PPC630,
600 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
601 POWER_MASKS},
602 {"740", PROCESSOR_PPC750,
603 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
604 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
605 {"750", PROCESSOR_PPC750,
606 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
607 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
608 {"7400", PROCESSOR_PPC7400,
609 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
610 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
611 {"7450", PROCESSOR_PPC7450,
612 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
613 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
614 {"8540", PROCESSOR_PPC8540,
615 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
616 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
617 {"801", PROCESSOR_MPCCORE,
618 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
619 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
620 {"821", PROCESSOR_MPCCORE,
621 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
622 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
623 {"823", PROCESSOR_MPCCORE,
624 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
625 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
626 {"860", PROCESSOR_MPCCORE,
627 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
628 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
630 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
632 /* Save current -mmultiple/-mno-multiple status. */
633 int multiple = TARGET_MULTIPLE;
634 /* Save current -mstring/-mno-string status. */
635 int string = TARGET_STRING;
637 /* Identify the processor type. */
638 rs6000_select[0].string = default_cpu;
639 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
641 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
643 ptr = &rs6000_select[i];
644 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
646 for (j = 0; j < ptt_size; j++)
647 if (! strcmp (ptr->string, processor_target_table[j].name))
649 if (ptr->set_tune_p)
650 rs6000_cpu = processor_target_table[j].processor;
652 if (ptr->set_arch_p)
654 target_flags |= processor_target_table[j].target_enable;
655 target_flags &= ~processor_target_table[j].target_disable;
657 break;
660 if (j == ptt_size)
661 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
665 if (TARGET_E500)
666 rs6000_isel = 1;
668 /* If we are optimizing big endian systems for space, use the load/store
669 multiple and string instructions. */
670 if (BYTES_BIG_ENDIAN && optimize_size)
671 target_flags |= MASK_MULTIPLE | MASK_STRING;
673 /* If -mmultiple or -mno-multiple was explicitly used, don't
674 override with the processor default */
675 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
676 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
678 /* If -mstring or -mno-string was explicitly used, don't override
679 with the processor default. */
680 if ((target_flags_explicit & MASK_STRING) != 0)
681 target_flags = (target_flags & ~MASK_STRING) | string;
683 /* Don't allow -mmultiple or -mstring on little endian systems
684 unless the cpu is a 750, because the hardware doesn't support the
685 instructions used in little endian mode, and causes an alignment
686 trap. The 750 does not cause an alignment trap (except when the
687 target is unaligned). */
689 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
691 if (TARGET_MULTIPLE)
693 target_flags &= ~MASK_MULTIPLE;
694 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
695 warning ("-mmultiple is not supported on little endian systems");
698 if (TARGET_STRING)
700 target_flags &= ~MASK_STRING;
701 if ((target_flags_explicit & MASK_STRING) != 0)
702 warning ("-mstring is not supported on little endian systems");
706 /* Set debug flags */
707 if (rs6000_debug_name)
709 if (! strcmp (rs6000_debug_name, "all"))
710 rs6000_debug_stack = rs6000_debug_arg = 1;
711 else if (! strcmp (rs6000_debug_name, "stack"))
712 rs6000_debug_stack = 1;
713 else if (! strcmp (rs6000_debug_name, "arg"))
714 rs6000_debug_arg = 1;
715 else
716 error ("unknown -mdebug-%s switch", rs6000_debug_name);
719 if (rs6000_traceback_name)
721 if (! strncmp (rs6000_traceback_name, "full", 4))
722 rs6000_traceback = traceback_full;
723 else if (! strncmp (rs6000_traceback_name, "part", 4))
724 rs6000_traceback = traceback_part;
725 else if (! strncmp (rs6000_traceback_name, "no", 2))
726 rs6000_traceback = traceback_none;
727 else
728 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
729 rs6000_traceback_name);
732 /* Set size of long double */
733 rs6000_long_double_type_size = 64;
734 if (rs6000_long_double_size_string)
736 char *tail;
737 int size = strtol (rs6000_long_double_size_string, &tail, 10);
738 if (*tail != '\0' || (size != 64 && size != 128))
739 error ("Unknown switch -mlong-double-%s",
740 rs6000_long_double_size_string);
741 else
742 rs6000_long_double_type_size = size;
745 /* Handle -mabi= options. */
746 rs6000_parse_abi_options ();
748 /* Handle -malign-XXXXX option. */
749 rs6000_parse_alignment_option ();
751 /* Handle generic -mFOO=YES/NO options. */
752 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
753 &rs6000_altivec_vrsave);
754 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
755 &rs6000_isel);
756 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
757 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
758 &rs6000_float_gprs);
760 /* Handle -mtls-size option. */
761 rs6000_parse_tls_size_option ();
763 #ifdef SUBTARGET_OVERRIDE_OPTIONS
764 SUBTARGET_OVERRIDE_OPTIONS;
765 #endif
766 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
767 SUBSUBTARGET_OVERRIDE_OPTIONS;
768 #endif
770 if (TARGET_E500)
772 /* The e500 does not have string instructions, and we set
773 MASK_STRING above when optimizing for size. */
774 if ((target_flags & MASK_STRING) != 0)
775 target_flags = target_flags & ~MASK_STRING;
777 /* No SPE means 64-bit long doubles, even if an E500. */
778 if (rs6000_spe_string != 0
779 && !strcmp (rs6000_spe_string, "no"))
780 rs6000_long_double_type_size = 64;
782 else if (rs6000_select[1].string != NULL)
784 /* For the powerpc-eabispe configuration, we set all these by
785 default, so let's unset them if we manually set another
786 CPU that is not the E500. */
787 if (rs6000_abi_string == 0)
788 rs6000_spe_abi = 0;
789 if (rs6000_spe_string == 0)
790 rs6000_spe = 0;
791 if (rs6000_float_gprs_string == 0)
792 rs6000_float_gprs = 0;
793 if (rs6000_isel_string == 0)
794 rs6000_isel = 0;
795 if (rs6000_long_double_size_string == 0)
796 rs6000_long_double_type_size = 64;
799 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
800 using TARGET_OPTIONS to handle a toggle switch, but we're out of
801 bits in target_flags so TARGET_SWITCHES cannot be used.
802 Assumption here is that rs6000_longcall_switch points into the
803 text of the complete option, rather than being a copy, so we can
804 scan back for the presence or absence of the no- modifier. */
805 if (rs6000_longcall_switch)
807 const char *base = rs6000_longcall_switch;
808 while (base[-1] != 'm') base--;
810 if (*rs6000_longcall_switch != '\0')
811 error ("invalid option `%s'", base);
812 rs6000_default_long_calls = (base[0] != 'n');
815 #ifdef TARGET_REGNAMES
816 /* If the user desires alternate register names, copy in the
817 alternate names now. */
818 if (TARGET_REGNAMES)
819 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
820 #endif
822 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
823 If -maix-struct-return or -msvr4-struct-return was explicitly
824 used, don't override with the ABI default. */
825 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
827 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
828 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
829 else
830 target_flags |= MASK_AIX_STRUCT_RET;
833 if (TARGET_LONG_DOUBLE_128
834 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
835 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
837 /* Allocate an alias set for register saves & restores from stack. */
838 rs6000_sr_alias_set = new_alias_set ();
840 if (TARGET_TOC)
841 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
843 /* We can only guarantee the availability of DI pseudo-ops when
844 assembling for 64-bit targets. */
845 if (!TARGET_64BIT)
847 targetm.asm_out.aligned_op.di = NULL;
848 targetm.asm_out.unaligned_op.di = NULL;
851 /* Set maximum branch target alignment at two instructions, eight bytes. */
852 align_jumps_max_skip = 8;
853 align_loops_max_skip = 8;
855 /* Arrange to save and restore machine status around nested functions. */
856 init_machine_status = rs6000_init_machine_status;
859 /* Handle generic options of the form -mfoo=yes/no.
860 NAME is the option name.
861 VALUE is the option value.
862 FLAG is the pointer to the flag where to store a 1 or 0, depending on
863 whether the option value is 'yes' or 'no' respectively. */
864 static void
865 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
867 if (value == 0)
868 return;
869 else if (!strcmp (value, "yes"))
870 *flag = 1;
871 else if (!strcmp (value, "no"))
872 *flag = 0;
873 else
874 error ("unknown -m%s= option specified: '%s'", name, value);
877 /* Handle -mabi= options. */
878 static void
879 rs6000_parse_abi_options ()
881 if (rs6000_abi_string == 0)
882 return;
883 else if (! strcmp (rs6000_abi_string, "altivec"))
884 rs6000_altivec_abi = 1;
885 else if (! strcmp (rs6000_abi_string, "no-altivec"))
886 rs6000_altivec_abi = 0;
887 else if (! strcmp (rs6000_abi_string, "spe"))
889 rs6000_spe_abi = 1;
890 if (!TARGET_SPE_ABI)
891 error ("not configured for ABI: '%s'", rs6000_abi_string);
894 else if (! strcmp (rs6000_abi_string, "no-spe"))
895 rs6000_spe_abi = 0;
896 else
897 error ("unknown ABI specified: '%s'", rs6000_abi_string);
900 /* Handle -malign-XXXXXX options. */
901 static void
902 rs6000_parse_alignment_option ()
904 if (rs6000_alignment_string == 0
905 || ! strcmp (rs6000_alignment_string, "power"))
906 rs6000_alignment_flags = MASK_ALIGN_POWER;
907 else if (! strcmp (rs6000_alignment_string, "natural"))
908 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
909 else
910 error ("unknown -malign-XXXXX option specified: '%s'",
911 rs6000_alignment_string);
914 /* Validate and record the size specified with the -mtls-size option. */
916 static void
917 rs6000_parse_tls_size_option ()
919 if (rs6000_tls_size_string == 0)
920 return;
921 else if (strcmp (rs6000_tls_size_string, "16") == 0)
922 rs6000_tls_size = 16;
923 else if (strcmp (rs6000_tls_size_string, "32") == 0)
924 rs6000_tls_size = 32;
925 else if (strcmp (rs6000_tls_size_string, "64") == 0)
926 rs6000_tls_size = 64;
927 else
928 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
931 void
932 optimization_options (level, size)
933 int level ATTRIBUTE_UNUSED;
934 int size ATTRIBUTE_UNUSED;
938 /* Do anything needed at the start of the asm file. */
940 void
941 rs6000_file_start (file, default_cpu)
942 FILE *file;
943 const char *default_cpu;
945 size_t i;
946 char buffer[80];
947 const char *start = buffer;
948 struct rs6000_cpu_select *ptr;
950 if (flag_verbose_asm)
952 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
953 rs6000_select[0].string = default_cpu;
955 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
957 ptr = &rs6000_select[i];
958 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
960 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
961 start = "";
965 #ifdef USING_ELFOS_H
966 switch (rs6000_sdata)
968 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
969 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
970 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
971 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
974 if (rs6000_sdata && g_switch_value)
976 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
977 g_switch_value);
978 start = "";
980 #endif
982 if (*start == '\0')
983 putc ('\n', file);
987 /* Return nonzero if this function is known to have a null epilogue. */
990 direct_return ()
992 if (reload_completed)
994 rs6000_stack_t *info = rs6000_stack_info ();
996 if (info->first_gp_reg_save == 32
997 && info->first_fp_reg_save == 64
998 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
999 && ! info->lr_save_p
1000 && ! info->cr_save_p
1001 && info->vrsave_mask == 0
1002 && ! info->push_p)
1003 return 1;
1006 return 0;
1009 /* Returns 1 always. */
1012 any_operand (op, mode)
1013 rtx op ATTRIBUTE_UNUSED;
1014 enum machine_mode mode ATTRIBUTE_UNUSED;
1016 return 1;
1019 /* Returns 1 if op is the count register. */
1021 count_register_operand (op, mode)
1022 rtx op;
1023 enum machine_mode mode ATTRIBUTE_UNUSED;
1025 if (GET_CODE (op) != REG)
1026 return 0;
1028 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1029 return 1;
1031 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1032 return 1;
1034 return 0;
1037 /* Returns 1 if op is an altivec register. */
1039 altivec_register_operand (op, mode)
1040 rtx op;
1041 enum machine_mode mode ATTRIBUTE_UNUSED;
1044 return (register_operand (op, mode)
1045 && (GET_CODE (op) != REG
1046 || REGNO (op) > FIRST_PSEUDO_REGISTER
1047 || ALTIVEC_REGNO_P (REGNO (op))));
1051 xer_operand (op, mode)
1052 rtx op;
1053 enum machine_mode mode ATTRIBUTE_UNUSED;
1055 if (GET_CODE (op) != REG)
1056 return 0;
1058 if (XER_REGNO_P (REGNO (op)))
1059 return 1;
1061 return 0;
1064 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1065 by such constants completes more quickly. */
1068 s8bit_cint_operand (op, mode)
1069 rtx op;
1070 enum machine_mode mode ATTRIBUTE_UNUSED;
1072 return ( GET_CODE (op) == CONST_INT
1073 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1076 /* Return 1 if OP is a constant that can fit in a D field. */
1079 short_cint_operand (op, mode)
1080 rtx op;
1081 enum machine_mode mode ATTRIBUTE_UNUSED;
1083 return (GET_CODE (op) == CONST_INT
1084 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1087 /* Similar for an unsigned D field. */
1090 u_short_cint_operand (op, mode)
1091 rtx op;
1092 enum machine_mode mode ATTRIBUTE_UNUSED;
1094 return (GET_CODE (op) == CONST_INT
1095 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1098 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1101 non_short_cint_operand (op, mode)
1102 rtx op;
1103 enum machine_mode mode ATTRIBUTE_UNUSED;
1105 return (GET_CODE (op) == CONST_INT
1106 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1109 /* Returns 1 if OP is a CONST_INT that is a positive value
1110 and an exact power of 2. */
1113 exact_log2_cint_operand (op, mode)
1114 rtx op;
1115 enum machine_mode mode ATTRIBUTE_UNUSED;
1117 return (GET_CODE (op) == CONST_INT
1118 && INTVAL (op) > 0
1119 && exact_log2 (INTVAL (op)) >= 0);
1122 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1123 ctr, or lr). */
1126 gpc_reg_operand (op, mode)
1127 rtx op;
1128 enum machine_mode mode;
1130 return (register_operand (op, mode)
1131 && (GET_CODE (op) != REG
1132 || (REGNO (op) >= ARG_POINTER_REGNUM
1133 && !XER_REGNO_P (REGNO (op)))
1134 || REGNO (op) < MQ_REGNO));
1137 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1138 CR field. */
1141 cc_reg_operand (op, mode)
1142 rtx op;
1143 enum machine_mode mode;
1145 return (register_operand (op, mode)
1146 && (GET_CODE (op) != REG
1147 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1148 || CR_REGNO_P (REGNO (op))));
1151 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1152 CR field that isn't CR0. */
1155 cc_reg_not_cr0_operand (op, mode)
1156 rtx op;
1157 enum machine_mode mode;
1159 return (register_operand (op, mode)
1160 && (GET_CODE (op) != REG
1161 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1162 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1165 /* Returns 1 if OP is either a constant integer valid for a D-field or
1166 a non-special register. If a register, it must be in the proper
1167 mode unless MODE is VOIDmode. */
1170 reg_or_short_operand (op, mode)
1171 rtx op;
1172 enum machine_mode mode;
1174 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1177 /* Similar, except check if the negation of the constant would be
1178 valid for a D-field. */
1181 reg_or_neg_short_operand (op, mode)
1182 rtx op;
1183 enum machine_mode mode;
1185 if (GET_CODE (op) == CONST_INT)
1186 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1188 return gpc_reg_operand (op, mode);
1191 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1192 a non-special register. If a register, it must be in the proper
1193 mode unless MODE is VOIDmode. */
1196 reg_or_aligned_short_operand (op, mode)
1197 rtx op;
1198 enum machine_mode mode;
1200 if (gpc_reg_operand (op, mode))
1201 return 1;
1202 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1203 return 1;
1205 return 0;
1209 /* Return 1 if the operand is either a register or an integer whose
1210 high-order 16 bits are zero. */
1213 reg_or_u_short_operand (op, mode)
1214 rtx op;
1215 enum machine_mode mode;
1217 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1220 /* Return 1 is the operand is either a non-special register or ANY
1221 constant integer. */
1224 reg_or_cint_operand (op, mode)
1225 rtx op;
1226 enum machine_mode mode;
1228 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1231 /* Return 1 is the operand is either a non-special register or ANY
1232 32-bit signed constant integer. */
1235 reg_or_arith_cint_operand (op, mode)
1236 rtx op;
1237 enum machine_mode mode;
1239 return (gpc_reg_operand (op, mode)
1240 || (GET_CODE (op) == CONST_INT
1241 #if HOST_BITS_PER_WIDE_INT != 32
1242 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1243 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1244 #endif
1248 /* Return 1 is the operand is either a non-special register or a 32-bit
1249 signed constant integer valid for 64-bit addition. */
1252 reg_or_add_cint64_operand (op, mode)
1253 rtx op;
1254 enum machine_mode mode;
1256 return (gpc_reg_operand (op, mode)
1257 || (GET_CODE (op) == CONST_INT
1258 #if HOST_BITS_PER_WIDE_INT == 32
1259 && INTVAL (op) < 0x7fff8000
1260 #else
1261 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1262 < 0x100000000ll)
1263 #endif
1267 /* Return 1 is the operand is either a non-special register or a 32-bit
1268 signed constant integer valid for 64-bit subtraction. */
1271 reg_or_sub_cint64_operand (op, mode)
1272 rtx op;
1273 enum machine_mode mode;
1275 return (gpc_reg_operand (op, mode)
1276 || (GET_CODE (op) == CONST_INT
1277 #if HOST_BITS_PER_WIDE_INT == 32
1278 && (- INTVAL (op)) < 0x7fff8000
1279 #else
1280 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1281 < 0x100000000ll)
1282 #endif
1286 /* Return 1 is the operand is either a non-special register or ANY
1287 32-bit unsigned constant integer. */
1290 reg_or_logical_cint_operand (op, mode)
1291 rtx op;
1292 enum machine_mode mode;
1294 if (GET_CODE (op) == CONST_INT)
1296 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1298 if (GET_MODE_BITSIZE (mode) <= 32)
1299 abort ();
1301 if (INTVAL (op) < 0)
1302 return 0;
1305 return ((INTVAL (op) & GET_MODE_MASK (mode)
1306 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1308 else if (GET_CODE (op) == CONST_DOUBLE)
1310 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1311 || mode != DImode)
1312 abort ();
1314 return CONST_DOUBLE_HIGH (op) == 0;
1316 else
1317 return gpc_reg_operand (op, mode);
1320 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1323 got_operand (op, mode)
1324 rtx op;
1325 enum machine_mode mode ATTRIBUTE_UNUSED;
1327 return (GET_CODE (op) == SYMBOL_REF
1328 || GET_CODE (op) == CONST
1329 || GET_CODE (op) == LABEL_REF);
1332 /* Return 1 if the operand is a simple references that can be loaded via
1333 the GOT (labels involving addition aren't allowed). */
1336 got_no_const_operand (op, mode)
1337 rtx op;
1338 enum machine_mode mode ATTRIBUTE_UNUSED;
1340 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1343 /* Return the number of instructions it takes to form a constant in an
1344 integer register. */
1346 static int
1347 num_insns_constant_wide (value)
1348 HOST_WIDE_INT value;
1350 /* signed constant loadable with {cal|addi} */
1351 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1352 return 1;
1354 /* constant loadable with {cau|addis} */
1355 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1356 return 1;
1358 #if HOST_BITS_PER_WIDE_INT == 64
1359 else if (TARGET_POWERPC64)
1361 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1362 HOST_WIDE_INT high = value >> 31;
1364 if (high == 0 || high == -1)
1365 return 2;
1367 high >>= 1;
1369 if (low == 0)
1370 return num_insns_constant_wide (high) + 1;
1371 else
1372 return (num_insns_constant_wide (high)
1373 + num_insns_constant_wide (low) + 1);
1375 #endif
1377 else
1378 return 2;
1382 num_insns_constant (op, mode)
1383 rtx op;
1384 enum machine_mode mode;
1386 if (GET_CODE (op) == CONST_INT)
1388 #if HOST_BITS_PER_WIDE_INT == 64
1389 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1390 && mask64_operand (op, mode))
1391 return 2;
1392 else
1393 #endif
1394 return num_insns_constant_wide (INTVAL (op));
1397 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1399 long l;
1400 REAL_VALUE_TYPE rv;
1402 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1403 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1404 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1407 else if (GET_CODE (op) == CONST_DOUBLE)
1409 HOST_WIDE_INT low;
1410 HOST_WIDE_INT high;
1411 long l[2];
1412 REAL_VALUE_TYPE rv;
1413 int endian = (WORDS_BIG_ENDIAN == 0);
1415 if (mode == VOIDmode || mode == DImode)
1417 high = CONST_DOUBLE_HIGH (op);
1418 low = CONST_DOUBLE_LOW (op);
1420 else
1422 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1423 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1424 high = l[endian];
1425 low = l[1 - endian];
1428 if (TARGET_32BIT)
1429 return (num_insns_constant_wide (low)
1430 + num_insns_constant_wide (high));
1432 else
1434 if (high == 0 && low >= 0)
1435 return num_insns_constant_wide (low);
1437 else if (high == -1 && low < 0)
1438 return num_insns_constant_wide (low);
1440 else if (mask64_operand (op, mode))
1441 return 2;
1443 else if (low == 0)
1444 return num_insns_constant_wide (high) + 1;
1446 else
1447 return (num_insns_constant_wide (high)
1448 + num_insns_constant_wide (low) + 1);
1452 else
1453 abort ();
1456 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1457 register with one instruction per word. We only do this if we can
1458 safely read CONST_DOUBLE_{LOW,HIGH}. */
1461 easy_fp_constant (op, mode)
1462 rtx op;
1463 enum machine_mode mode;
1465 if (GET_CODE (op) != CONST_DOUBLE
1466 || GET_MODE (op) != mode
1467 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1468 return 0;
1470 /* Consider all constants with -msoft-float to be easy. */
1471 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1472 && mode != DImode)
1473 return 1;
1475 /* If we are using V.4 style PIC, consider all constants to be hard. */
1476 if (flag_pic && DEFAULT_ABI == ABI_V4)
1477 return 0;
1479 #ifdef TARGET_RELOCATABLE
1480 /* Similarly if we are using -mrelocatable, consider all constants
1481 to be hard. */
1482 if (TARGET_RELOCATABLE)
1483 return 0;
1484 #endif
1486 if (mode == TFmode)
1488 long k[4];
1489 REAL_VALUE_TYPE rv;
1491 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1492 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1494 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1495 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1496 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1497 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1500 else if (mode == DFmode)
1502 long k[2];
1503 REAL_VALUE_TYPE rv;
1505 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1506 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1508 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1509 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1512 else if (mode == SFmode)
1514 long l;
1515 REAL_VALUE_TYPE rv;
1517 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1518 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1520 return num_insns_constant_wide (l) == 1;
1523 else if (mode == DImode)
1524 return ((TARGET_POWERPC64
1525 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1526 || (num_insns_constant (op, DImode) <= 2));
1528 else if (mode == SImode)
1529 return 1;
1530 else
1531 abort ();
1534 /* Return non zero if all elements of a vector have the same value. */
1536 static int
1537 easy_vector_same (op, mode)
1538 rtx op;
1539 enum machine_mode mode ATTRIBUTE_UNUSED;
1541 int units, i, cst;
1543 units = CONST_VECTOR_NUNITS (op);
1545 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1546 for (i = 1; i < units; ++i)
1547 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1548 break;
1549 if (i == units)
1550 return 1;
1551 return 0;
1554 /* Return 1 if the operand is a CONST_INT and can be put into a
1555 register without using memory. */
1558 easy_vector_constant (op, mode)
1559 rtx op;
1560 enum machine_mode mode;
1562 int cst, cst2;
1564 if (GET_CODE (op) != CONST_VECTOR
1565 || (!TARGET_ALTIVEC
1566 && !TARGET_SPE))
1567 return 0;
1569 if (zero_constant (op, mode)
1570 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1571 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1572 return 1;
1574 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1575 return 0;
1577 if (TARGET_SPE && mode == V1DImode)
1578 return 0;
1580 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1581 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1583 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1584 li r0, CONSTANT1
1585 evmergelo r0, r0, r0
1586 li r0, CONSTANT2
1588 I don't know how efficient it would be to allow bigger constants,
1589 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1590 instructions is better than a 64-bit memory load, but I don't
1591 have the e500 timing specs. */
1592 if (TARGET_SPE && mode == V2SImode
1593 && cst >= -0x7fff && cst <= 0x7fff
1594 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1595 return 1;
1597 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1598 return 1;
1600 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1601 return 1;
1603 return 0;
1606 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1609 easy_vector_constant_add_self (op, mode)
1610 rtx op;
1611 enum machine_mode mode;
1613 int cst;
1615 if (!easy_vector_constant (op, mode))
1616 return 0;
1618 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1620 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1623 const char *
1624 output_vec_const_move (operands)
1625 rtx *operands;
1627 int cst, cst2;
1628 enum machine_mode mode;
1629 rtx dest, vec;
1631 dest = operands[0];
1632 vec = operands[1];
1634 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1635 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1636 mode = GET_MODE (dest);
1638 if (TARGET_ALTIVEC)
1640 if (zero_constant (vec, mode))
1641 return "vxor %0,%0,%0";
1642 else if (EASY_VECTOR_15 (cst, vec, mode))
1644 operands[1] = GEN_INT (cst);
1645 switch (mode)
1647 case V4SImode:
1648 return "vspltisw %0,%1";
1649 case V8HImode:
1650 return "vspltish %0,%1";
1651 case V16QImode:
1652 return "vspltisb %0,%1";
1653 default:
1654 abort ();
1657 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1658 return "#";
1659 else
1660 abort ();
1663 if (TARGET_SPE)
1665 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1666 pattern of V1DI, V4HI, and V2SF.
1668 FIXME: We should probabl return # and add post reload
1669 splitters for these, but this way is so easy ;-).
1671 operands[1] = GEN_INT (cst);
1672 operands[2] = GEN_INT (cst2);
1673 if (cst == cst2)
1674 return "li %0,%1\n\tevmergelo %0,%0,%0";
1675 else
1676 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1679 abort ();
1682 /* Return 1 if the operand is the constant 0. This works for scalars
1683 as well as vectors. */
1685 zero_constant (op, mode)
1686 rtx op;
1687 enum machine_mode mode;
1689 return op == CONST0_RTX (mode);
1692 /* Return 1 if the operand is 0.0. */
1694 zero_fp_constant (op, mode)
1695 rtx op;
1696 enum machine_mode mode;
1698 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1701 /* Return 1 if the operand is in volatile memory. Note that during
1702 the RTL generation phase, memory_operand does not return TRUE for
1703 volatile memory references. So this function allows us to
1704 recognize volatile references where its safe. */
1707 volatile_mem_operand (op, mode)
1708 rtx op;
1709 enum machine_mode mode;
1711 if (GET_CODE (op) != MEM)
1712 return 0;
1714 if (!MEM_VOLATILE_P (op))
1715 return 0;
1717 if (mode != GET_MODE (op))
1718 return 0;
1720 if (reload_completed)
1721 return memory_operand (op, mode);
1723 if (reload_in_progress)
1724 return strict_memory_address_p (mode, XEXP (op, 0));
1726 return memory_address_p (mode, XEXP (op, 0));
1729 /* Return 1 if the operand is an offsettable memory operand. */
1732 offsettable_mem_operand (op, mode)
1733 rtx op;
1734 enum machine_mode mode;
1736 return ((GET_CODE (op) == MEM)
1737 && offsettable_address_p (reload_completed || reload_in_progress,
1738 mode, XEXP (op, 0)));
1741 /* Return 1 if the operand is either an easy FP constant (see above) or
1742 memory. */
1745 mem_or_easy_const_operand (op, mode)
1746 rtx op;
1747 enum machine_mode mode;
1749 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1752 /* Return 1 if the operand is either a non-special register or an item
1753 that can be used as the operand of a `mode' add insn. */
1756 add_operand (op, mode)
1757 rtx op;
1758 enum machine_mode mode;
1760 if (GET_CODE (op) == CONST_INT)
1761 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1762 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1764 return gpc_reg_operand (op, mode);
1767 /* Return 1 if OP is a constant but not a valid add_operand. */
1770 non_add_cint_operand (op, mode)
1771 rtx op;
1772 enum machine_mode mode ATTRIBUTE_UNUSED;
1774 return (GET_CODE (op) == CONST_INT
1775 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1776 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1779 /* Return 1 if the operand is a non-special register or a constant that
1780 can be used as the operand of an OR or XOR insn on the RS/6000. */
1783 logical_operand (op, mode)
1784 rtx op;
1785 enum machine_mode mode;
1787 HOST_WIDE_INT opl, oph;
1789 if (gpc_reg_operand (op, mode))
1790 return 1;
1792 if (GET_CODE (op) == CONST_INT)
1794 opl = INTVAL (op) & GET_MODE_MASK (mode);
1796 #if HOST_BITS_PER_WIDE_INT <= 32
1797 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1798 return 0;
1799 #endif
1801 else if (GET_CODE (op) == CONST_DOUBLE)
1803 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1804 abort ();
1806 opl = CONST_DOUBLE_LOW (op);
1807 oph = CONST_DOUBLE_HIGH (op);
1808 if (oph != 0)
1809 return 0;
1811 else
1812 return 0;
1814 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1815 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1818 /* Return 1 if C is a constant that is not a logical operand (as
1819 above), but could be split into one. */
1822 non_logical_cint_operand (op, mode)
1823 rtx op;
1824 enum machine_mode mode;
1826 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1827 && ! logical_operand (op, mode)
1828 && reg_or_logical_cint_operand (op, mode));
1831 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1832 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1833 Reject all ones and all zeros, since these should have been optimized
1834 away and confuse the making of MB and ME. */
1837 mask_operand (op, mode)
1838 rtx op;
1839 enum machine_mode mode ATTRIBUTE_UNUSED;
1841 HOST_WIDE_INT c, lsb;
1843 if (GET_CODE (op) != CONST_INT)
1844 return 0;
1846 c = INTVAL (op);
1848 /* Fail in 64-bit mode if the mask wraps around because the upper
1849 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1850 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1851 return 0;
1853 /* We don't change the number of transitions by inverting,
1854 so make sure we start with the LS bit zero. */
1855 if (c & 1)
1856 c = ~c;
1858 /* Reject all zeros or all ones. */
1859 if (c == 0)
1860 return 0;
1862 /* Find the first transition. */
1863 lsb = c & -c;
1865 /* Invert to look for a second transition. */
1866 c = ~c;
1868 /* Erase first transition. */
1869 c &= -lsb;
1871 /* Find the second transition (if any). */
1872 lsb = c & -c;
1874 /* Match if all the bits above are 1's (or c is zero). */
1875 return c == -lsb;
1878 /* Return 1 for the PowerPC64 rlwinm corner case. */
1881 mask_operand_wrap (op, mode)
1882 rtx op;
1883 enum machine_mode mode ATTRIBUTE_UNUSED;
1885 HOST_WIDE_INT c, lsb;
1887 if (GET_CODE (op) != CONST_INT)
1888 return 0;
1890 c = INTVAL (op);
1892 if ((c & 0x80000001) != 0x80000001)
1893 return 0;
1895 c = ~c;
1896 if (c == 0)
1897 return 0;
1899 lsb = c & -c;
1900 c = ~c;
1901 c &= -lsb;
1902 lsb = c & -c;
1903 return c == -lsb;
1906 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1907 It is if there are no more than one 1->0 or 0->1 transitions.
1908 Reject all zeros, since zero should have been optimized away and
1909 confuses the making of MB and ME. */
1912 mask64_operand (op, mode)
1913 rtx op;
1914 enum machine_mode mode ATTRIBUTE_UNUSED;
1916 if (GET_CODE (op) == CONST_INT)
1918 HOST_WIDE_INT c, lsb;
1920 c = INTVAL (op);
1922 /* Reject all zeros. */
1923 if (c == 0)
1924 return 0;
1926 /* We don't change the number of transitions by inverting,
1927 so make sure we start with the LS bit zero. */
1928 if (c & 1)
1929 c = ~c;
1931 /* Find the transition, and check that all bits above are 1's. */
1932 lsb = c & -c;
1934 /* Match if all the bits above are 1's (or c is zero). */
1935 return c == -lsb;
1937 return 0;
1940 /* Like mask64_operand, but allow up to three transitions. This
1941 predicate is used by insn patterns that generate two rldicl or
1942 rldicr machine insns. */
1945 mask64_2_operand (op, mode)
1946 rtx op;
1947 enum machine_mode mode ATTRIBUTE_UNUSED;
1949 if (GET_CODE (op) == CONST_INT)
1951 HOST_WIDE_INT c, lsb;
1953 c = INTVAL (op);
1955 /* Disallow all zeros. */
1956 if (c == 0)
1957 return 0;
1959 /* We don't change the number of transitions by inverting,
1960 so make sure we start with the LS bit zero. */
1961 if (c & 1)
1962 c = ~c;
1964 /* Find the first transition. */
1965 lsb = c & -c;
1967 /* Invert to look for a second transition. */
1968 c = ~c;
1970 /* Erase first transition. */
1971 c &= -lsb;
1973 /* Find the second transition. */
1974 lsb = c & -c;
1976 /* Invert to look for a third transition. */
1977 c = ~c;
1979 /* Erase second transition. */
1980 c &= -lsb;
1982 /* Find the third transition (if any). */
1983 lsb = c & -c;
1985 /* Match if all the bits above are 1's (or c is zero). */
1986 return c == -lsb;
1988 return 0;
1991 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1992 implement ANDing by the mask IN. */
1993 void
1994 build_mask64_2_operands (in, out)
1995 rtx in;
1996 rtx *out;
1998 #if HOST_BITS_PER_WIDE_INT >= 64
1999 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2000 int shift;
2002 if (GET_CODE (in) != CONST_INT)
2003 abort ();
2005 c = INTVAL (in);
2006 if (c & 1)
2008 /* Assume c initially something like 0x00fff000000fffff. The idea
2009 is to rotate the word so that the middle ^^^^^^ group of zeros
2010 is at the MS end and can be cleared with an rldicl mask. We then
2011 rotate back and clear off the MS ^^ group of zeros with a
2012 second rldicl. */
2013 c = ~c; /* c == 0xff000ffffff00000 */
2014 lsb = c & -c; /* lsb == 0x0000000000100000 */
2015 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2016 c = ~c; /* c == 0x00fff000000fffff */
2017 c &= -lsb; /* c == 0x00fff00000000000 */
2018 lsb = c & -c; /* lsb == 0x0000100000000000 */
2019 c = ~c; /* c == 0xff000fffffffffff */
2020 c &= -lsb; /* c == 0xff00000000000000 */
2021 shift = 0;
2022 while ((lsb >>= 1) != 0)
2023 shift++; /* shift == 44 on exit from loop */
2024 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2025 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2026 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2028 else
2030 /* Assume c initially something like 0xff000f0000000000. The idea
2031 is to rotate the word so that the ^^^ middle group of zeros
2032 is at the LS end and can be cleared with an rldicr mask. We then
2033 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2034 a second rldicr. */
2035 lsb = c & -c; /* lsb == 0x0000010000000000 */
2036 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2037 c = ~c; /* c == 0x00fff0ffffffffff */
2038 c &= -lsb; /* c == 0x00fff00000000000 */
2039 lsb = c & -c; /* lsb == 0x0000100000000000 */
2040 c = ~c; /* c == 0xff000fffffffffff */
2041 c &= -lsb; /* c == 0xff00000000000000 */
2042 shift = 0;
2043 while ((lsb >>= 1) != 0)
2044 shift++; /* shift == 44 on exit from loop */
2045 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2046 m1 >>= shift; /* m1 == 0x0000000000000fff */
2047 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2050 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2051 masks will be all 1's. We are guaranteed more than one transition. */
2052 out[0] = GEN_INT (64 - shift);
2053 out[1] = GEN_INT (m1);
2054 out[2] = GEN_INT (shift);
2055 out[3] = GEN_INT (m2);
2056 #else
2057 (void)in;
2058 (void)out;
2059 abort ();
2060 #endif
2063 /* Return 1 if the operand is either a non-special register or a constant
2064 that can be used as the operand of a PowerPC64 logical AND insn. */
2067 and64_operand (op, mode)
2068 rtx op;
2069 enum machine_mode mode;
2071 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2072 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2074 return (logical_operand (op, mode) || mask64_operand (op, mode));
2077 /* Like the above, but also match constants that can be implemented
2078 with two rldicl or rldicr insns. */
2081 and64_2_operand (op, mode)
2082 rtx op;
2083 enum machine_mode mode;
2085 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2086 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2088 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2091 /* Return 1 if the operand is either a non-special register or a
2092 constant that can be used as the operand of an RS/6000 logical AND insn. */
2095 and_operand (op, mode)
2096 rtx op;
2097 enum machine_mode mode;
2099 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2100 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2102 return (logical_operand (op, mode) || mask_operand (op, mode));
2105 /* Return 1 if the operand is a general register or memory operand. */
2108 reg_or_mem_operand (op, mode)
2109 rtx op;
2110 enum machine_mode mode;
2112 return (gpc_reg_operand (op, mode)
2113 || memory_operand (op, mode)
2114 || volatile_mem_operand (op, mode));
2117 /* Return 1 if the operand is a general register or memory operand without
2118 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2119 instruction. */
2122 lwa_operand (op, mode)
2123 rtx op;
2124 enum machine_mode mode;
2126 rtx inner = op;
2128 if (reload_completed && GET_CODE (inner) == SUBREG)
2129 inner = SUBREG_REG (inner);
2131 return gpc_reg_operand (inner, mode)
2132 || (memory_operand (inner, mode)
2133 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2134 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2135 && (GET_CODE (XEXP (inner, 0)) != PLUS
2136 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2137 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2140 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2143 symbol_ref_operand (op, mode)
2144 rtx op;
2145 enum machine_mode mode;
2147 if (mode != VOIDmode && GET_MODE (op) != mode)
2148 return 0;
2150 return (GET_CODE (op) == SYMBOL_REF
2151 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2154 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2155 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2158 call_operand (op, mode)
2159 rtx op;
2160 enum machine_mode mode;
2162 if (mode != VOIDmode && GET_MODE (op) != mode)
2163 return 0;
2165 return (GET_CODE (op) == SYMBOL_REF
2166 || (GET_CODE (op) == REG
2167 && (REGNO (op) == LINK_REGISTER_REGNUM
2168 || REGNO (op) == COUNT_REGISTER_REGNUM
2169 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2172 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2173 this file. */
2176 current_file_function_operand (op, mode)
2177 rtx op;
2178 enum machine_mode mode ATTRIBUTE_UNUSED;
2180 return (GET_CODE (op) == SYMBOL_REF
2181 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2182 && (SYMBOL_REF_LOCAL_P (op)
2183 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2186 /* Return 1 if this operand is a valid input for a move insn. */
2189 input_operand (op, mode)
2190 rtx op;
2191 enum machine_mode mode;
2193 /* Memory is always valid. */
2194 if (memory_operand (op, mode))
2195 return 1;
2197 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2198 if (GET_CODE (op) == CONSTANT_P_RTX)
2199 return 1;
2201 /* For floating-point, easy constants are valid. */
2202 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2203 && CONSTANT_P (op)
2204 && easy_fp_constant (op, mode))
2205 return 1;
2207 /* Allow any integer constant. */
2208 if (GET_MODE_CLASS (mode) == MODE_INT
2209 && (GET_CODE (op) == CONST_INT
2210 || GET_CODE (op) == CONST_DOUBLE))
2211 return 1;
2213 /* Allow easy vector constants. */
2214 if (GET_CODE (op) == CONST_VECTOR
2215 && easy_vector_constant (op, mode))
2216 return 1;
2218 /* For floating-point or multi-word mode, the only remaining valid type
2219 is a register. */
2220 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2221 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2222 return register_operand (op, mode);
2224 /* The only cases left are integral modes one word or smaller (we
2225 do not get called for MODE_CC values). These can be in any
2226 register. */
2227 if (register_operand (op, mode))
2228 return 1;
2230 /* A SYMBOL_REF referring to the TOC is valid. */
2231 if (legitimate_constant_pool_address_p (op))
2232 return 1;
2234 /* A constant pool expression (relative to the TOC) is valid */
2235 if (toc_relative_expr_p (op))
2236 return 1;
2238 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2239 to be valid. */
2240 if (DEFAULT_ABI == ABI_V4
2241 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2242 && small_data_operand (op, Pmode))
2243 return 1;
2245 return 0;
2248 /* Return 1 for an operand in small memory on V.4/eabi. */
2251 small_data_operand (op, mode)
2252 rtx op ATTRIBUTE_UNUSED;
2253 enum machine_mode mode ATTRIBUTE_UNUSED;
2255 #if TARGET_ELF
2256 rtx sym_ref;
2258 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2259 return 0;
2261 if (DEFAULT_ABI != ABI_V4)
2262 return 0;
2264 if (GET_CODE (op) == SYMBOL_REF)
2265 sym_ref = op;
2267 else if (GET_CODE (op) != CONST
2268 || GET_CODE (XEXP (op, 0)) != PLUS
2269 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2270 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2271 return 0;
2273 else
2275 rtx sum = XEXP (op, 0);
2276 HOST_WIDE_INT summand;
2278 /* We have to be careful here, because it is the referenced address
2279 that must be 32k from _SDA_BASE_, not just the symbol. */
2280 summand = INTVAL (XEXP (sum, 1));
2281 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2282 return 0;
2284 sym_ref = XEXP (sum, 0);
2287 return SYMBOL_REF_SMALL_P (sym_ref);
2288 #else
2289 return 0;
2290 #endif
2293 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2295 static int
2296 constant_pool_expr_1 (op, have_sym, have_toc)
2297 rtx op;
2298 int *have_sym;
2299 int *have_toc;
2301 switch (GET_CODE(op))
2303 case SYMBOL_REF:
2304 if (RS6000_SYMBOL_REF_TLS_P (op))
2305 return 0;
2306 else if (CONSTANT_POOL_ADDRESS_P (op))
2308 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2310 *have_sym = 1;
2311 return 1;
2313 else
2314 return 0;
2316 else if (! strcmp (XSTR (op, 0), toc_label_name))
2318 *have_toc = 1;
2319 return 1;
2321 else
2322 return 0;
2323 case PLUS:
2324 case MINUS:
2325 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2326 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2327 case CONST:
2328 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2329 case CONST_INT:
2330 return 1;
2331 default:
2332 return 0;
2336 static bool
2337 constant_pool_expr_p (op)
2338 rtx op;
2340 int have_sym = 0;
2341 int have_toc = 0;
2342 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2345 static bool
2346 toc_relative_expr_p (op)
2347 rtx op;
2349 int have_sym = 0;
2350 int have_toc = 0;
2351 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2354 /* SPE offset addressing is limited to 5-bits worth of double words. */
2355 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2357 bool
2358 legitimate_constant_pool_address_p (x)
2359 rtx x;
2361 return (TARGET_TOC
2362 && GET_CODE (x) == PLUS
2363 && GET_CODE (XEXP (x, 0)) == REG
2364 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2365 && constant_pool_expr_p (XEXP (x, 1)));
2368 static bool
2369 legitimate_small_data_p (mode, x)
2370 enum machine_mode mode;
2371 rtx x;
2373 return (DEFAULT_ABI == ABI_V4
2374 && !flag_pic && !TARGET_TOC
2375 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2376 && small_data_operand (x, mode));
2379 static bool
2380 legitimate_offset_address_p (mode, x, strict)
2381 enum machine_mode mode;
2382 rtx x;
2383 int strict;
2385 unsigned HOST_WIDE_INT offset, extra;
2387 if (GET_CODE (x) != PLUS)
2388 return false;
2389 if (GET_CODE (XEXP (x, 0)) != REG)
2390 return false;
2391 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2392 return false;
2393 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2394 return false;
2396 offset = INTVAL (XEXP (x, 1));
2397 extra = 0;
2398 switch (mode)
2400 case V16QImode:
2401 case V8HImode:
2402 case V4SFmode:
2403 case V4SImode:
2404 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2405 which leaves the only valid constant offset of zero, which by
2406 canonicalization rules is also invalid. */
2407 return false;
2409 case V4HImode:
2410 case V2SImode:
2411 case V1DImode:
2412 case V2SFmode:
2413 /* SPE vector modes. */
2414 return SPE_CONST_OFFSET_OK (offset);
2416 case DFmode:
2417 case DImode:
2418 if (TARGET_32BIT)
2419 extra = 4;
2420 else if (offset & 3)
2421 return false;
2422 break;
2424 case TFmode:
2425 case TImode:
2426 if (TARGET_32BIT)
2427 extra = 12;
2428 else if (offset & 3)
2429 return false;
2430 else
2431 extra = 8;
2432 break;
2434 default:
2435 break;
2438 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2441 static bool
2442 legitimate_indexed_address_p (x, strict)
2443 rtx x;
2444 int strict;
2446 rtx op0, op1;
2448 if (GET_CODE (x) != PLUS)
2449 return false;
2450 op0 = XEXP (x, 0);
2451 op1 = XEXP (x, 1);
2453 if (!REG_P (op0) || !REG_P (op1))
2454 return false;
2456 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2457 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2458 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2459 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2462 static inline bool
2463 legitimate_indirect_address_p (x, strict)
2464 rtx x;
2465 int strict;
2467 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2470 static bool
2471 legitimate_lo_sum_address_p (mode, x, strict)
2472 enum machine_mode mode;
2473 rtx x;
2474 int strict;
2476 if (GET_CODE (x) != LO_SUM)
2477 return false;
2478 if (GET_CODE (XEXP (x, 0)) != REG)
2479 return false;
2480 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2481 return false;
2482 x = XEXP (x, 1);
2484 if (TARGET_ELF)
2486 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2487 return false;
2488 if (TARGET_TOC)
2489 return false;
2490 if (GET_MODE_NUNITS (mode) != 1)
2491 return false;
2492 if (GET_MODE_BITSIZE (mode) > 32
2493 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2494 return false;
2496 return CONSTANT_P (x);
2499 return false;
2503 /* Try machine-dependent ways of modifying an illegitimate address
2504 to be legitimate. If we find one, return the new, valid address.
2505 This is used from only one place: `memory_address' in explow.c.
2507 OLDX is the address as it was before break_out_memory_refs was
2508 called. In some cases it is useful to look at this to decide what
2509 needs to be done.
2511 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2513 It is always safe for this function to do nothing. It exists to
2514 recognize opportunities to optimize the output.
2516 On RS/6000, first check for the sum of a register with a constant
2517 integer that is out of range. If so, generate code to add the
2518 constant with the low-order 16 bits masked to the register and force
2519 this result into another register (this can be done with `cau').
2520 Then generate an address of REG+(CONST&0xffff), allowing for the
2521 possibility of bit 16 being a one.
2523 Then check for the sum of a register and something not constant, try to
2524 load the other things into a register and return the sum. */
2527 rs6000_legitimize_address (x, oldx, mode)
2528 rtx x;
2529 rtx oldx ATTRIBUTE_UNUSED;
2530 enum machine_mode mode;
2532 if (GET_CODE (x) == SYMBOL_REF)
2534 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2535 if (model != 0)
2536 return rs6000_legitimize_tls_address (x, model);
2539 if (GET_CODE (x) == PLUS
2540 && GET_CODE (XEXP (x, 0)) == REG
2541 && GET_CODE (XEXP (x, 1)) == CONST_INT
2542 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2544 HOST_WIDE_INT high_int, low_int;
2545 rtx sum;
2546 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2547 high_int = INTVAL (XEXP (x, 1)) - low_int;
2548 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2549 GEN_INT (high_int)), 0);
2550 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2552 else if (GET_CODE (x) == PLUS
2553 && GET_CODE (XEXP (x, 0)) == REG
2554 && GET_CODE (XEXP (x, 1)) != CONST_INT
2555 && GET_MODE_NUNITS (mode) == 1
2556 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2557 || TARGET_POWERPC64
2558 || (mode != DFmode && mode != TFmode))
2559 && (TARGET_POWERPC64 || mode != DImode)
2560 && mode != TImode)
2562 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2563 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2565 else if (ALTIVEC_VECTOR_MODE (mode))
2567 rtx reg;
2569 /* Make sure both operands are registers. */
2570 if (GET_CODE (x) == PLUS)
2571 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2572 force_reg (Pmode, XEXP (x, 1)));
2574 reg = force_reg (Pmode, x);
2575 return reg;
2577 else if (SPE_VECTOR_MODE (mode))
2579 /* We accept [reg + reg] and [reg + OFFSET]. */
2581 if (GET_CODE (x) == PLUS)
2583 rtx op1 = XEXP (x, 0);
2584 rtx op2 = XEXP (x, 1);
2586 op1 = force_reg (Pmode, op1);
2588 if (GET_CODE (op2) != REG
2589 && (GET_CODE (op2) != CONST_INT
2590 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2591 op2 = force_reg (Pmode, op2);
2593 return gen_rtx_PLUS (Pmode, op1, op2);
2596 return force_reg (Pmode, x);
2598 else if (TARGET_ELF
2599 && TARGET_32BIT
2600 && TARGET_NO_TOC
2601 && ! flag_pic
2602 && GET_CODE (x) != CONST_INT
2603 && GET_CODE (x) != CONST_DOUBLE
2604 && CONSTANT_P (x)
2605 && GET_MODE_NUNITS (mode) == 1
2606 && (GET_MODE_BITSIZE (mode) <= 32
2607 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2609 rtx reg = gen_reg_rtx (Pmode);
2610 emit_insn (gen_elf_high (reg, (x)));
2611 return gen_rtx_LO_SUM (Pmode, reg, (x));
2613 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2614 && ! flag_pic
2615 #if TARGET_MACHO
2616 && ! MACHO_DYNAMIC_NO_PIC_P
2617 #endif
2618 && GET_CODE (x) != CONST_INT
2619 && GET_CODE (x) != CONST_DOUBLE
2620 && CONSTANT_P (x)
2621 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2622 && mode != DImode
2623 && mode != TImode)
2625 rtx reg = gen_reg_rtx (Pmode);
2626 emit_insn (gen_macho_high (reg, (x)));
2627 return gen_rtx_LO_SUM (Pmode, reg, (x));
2629 else if (TARGET_TOC
2630 && constant_pool_expr_p (x)
2631 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2633 return create_TOC_reference (x);
2635 else
2636 return NULL_RTX;
2639 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2641 static GTY(()) rtx rs6000_tls_symbol;
2642 static rtx
2643 rs6000_tls_get_addr ()
2645 if (!rs6000_tls_symbol)
2646 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2648 return rs6000_tls_symbol;
2651 /* Construct the SYMBOL_REF for TLS GOT references. */
2653 static GTY(()) rtx rs6000_got_symbol;
2654 static rtx
2655 rs6000_got_sym ()
2657 if (!rs6000_got_symbol)
2659 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2660 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2661 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2664 return rs6000_got_symbol;
2667 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2668 this (thread-local) address. */
2670 static rtx
2671 rs6000_legitimize_tls_address (addr, model)
2672 rtx addr;
2673 enum tls_model model;
2675 rtx dest, insn;
2677 dest = gen_reg_rtx (Pmode);
2678 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2680 rtx tlsreg;
2682 if (TARGET_64BIT)
2684 tlsreg = gen_rtx_REG (Pmode, 13);
2685 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2687 else
2689 tlsreg = gen_rtx_REG (Pmode, 2);
2690 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2692 emit_insn (insn);
2694 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2696 rtx tlsreg, tmp;
2698 tmp = gen_reg_rtx (Pmode);
2699 if (TARGET_64BIT)
2701 tlsreg = gen_rtx_REG (Pmode, 13);
2702 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2704 else
2706 tlsreg = gen_rtx_REG (Pmode, 2);
2707 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2709 emit_insn (insn);
2710 if (TARGET_64BIT)
2711 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2712 else
2713 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2714 emit_insn (insn);
2716 else
2718 rtx r3, got, tga, tmp1, tmp2, eqv;
2720 if (TARGET_64BIT)
2721 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2722 else
2724 if (flag_pic == 1)
2725 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2726 else
2728 rtx gsym = rs6000_got_sym ();
2729 got = gen_reg_rtx (Pmode);
2730 if (flag_pic == 0)
2731 rs6000_emit_move (got, gsym, Pmode);
2732 else
2734 char buf[30];
2735 static int tls_got_labelno = 0;
2736 rtx tempLR, lab, tmp3, mem;
2737 rtx first, last;
2739 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2740 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2741 tempLR = gen_reg_rtx (Pmode);
2742 tmp1 = gen_reg_rtx (Pmode);
2743 tmp2 = gen_reg_rtx (Pmode);
2744 tmp3 = gen_reg_rtx (Pmode);
2745 mem = gen_rtx_MEM (Pmode, tmp1);
2746 RTX_UNCHANGING_P (mem) = 1;
2748 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2749 gsym));
2750 emit_move_insn (tmp1, tempLR);
2751 emit_move_insn (tmp2, mem);
2752 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2753 last = emit_move_insn (got, tmp3);
2754 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2755 REG_NOTES (last));
2756 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2757 REG_NOTES (first));
2758 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2759 REG_NOTES (last));
2764 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2766 r3 = gen_rtx_REG (Pmode, 3);
2767 if (TARGET_64BIT)
2768 insn = gen_tls_gd_64 (r3, got, addr);
2769 else
2770 insn = gen_tls_gd_32 (r3, got, addr);
2771 start_sequence ();
2772 emit_insn (insn);
2773 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2774 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2775 insn = emit_call_insn (insn);
2776 CONST_OR_PURE_CALL_P (insn) = 1;
2777 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2778 insn = get_insns ();
2779 end_sequence ();
2780 emit_libcall_block (insn, dest, r3, addr);
2782 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2784 r3 = gen_rtx_REG (Pmode, 3);
2785 if (TARGET_64BIT)
2786 insn = gen_tls_ld_64 (r3, got);
2787 else
2788 insn = gen_tls_ld_32 (r3, got);
2789 start_sequence ();
2790 emit_insn (insn);
2791 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2792 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2793 insn = emit_call_insn (insn);
2794 CONST_OR_PURE_CALL_P (insn) = 1;
2795 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2796 insn = get_insns ();
2797 end_sequence ();
2798 tmp1 = gen_reg_rtx (Pmode);
2799 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2800 UNSPEC_TLSLD);
2801 emit_libcall_block (insn, tmp1, r3, eqv);
2802 if (rs6000_tls_size == 16)
2804 if (TARGET_64BIT)
2805 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2806 else
2807 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2809 else if (rs6000_tls_size == 32)
2811 tmp2 = gen_reg_rtx (Pmode);
2812 if (TARGET_64BIT)
2813 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2814 else
2815 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2816 emit_insn (insn);
2817 if (TARGET_64BIT)
2818 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2819 else
2820 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2822 else
2824 tmp2 = gen_reg_rtx (Pmode);
2825 if (TARGET_64BIT)
2826 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2827 else
2828 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2829 emit_insn (insn);
2830 insn = gen_rtx_SET (Pmode, dest,
2831 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2833 emit_insn (insn);
2835 else
2837 /* IE, or 64 bit offset LE. */
2838 tmp2 = gen_reg_rtx (Pmode);
2839 if (TARGET_64BIT)
2840 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2841 else
2842 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2843 emit_insn (insn);
2844 if (TARGET_64BIT)
2845 insn = gen_tls_tls_64 (dest, tmp2, addr);
2846 else
2847 insn = gen_tls_tls_32 (dest, tmp2, addr);
2848 emit_insn (insn);
2852 return dest;
2855 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2856 instruction definitions. */
2859 rs6000_tls_symbol_ref (x, mode)
2860 rtx x;
2861 enum machine_mode mode ATTRIBUTE_UNUSED;
2863 return RS6000_SYMBOL_REF_TLS_P (x);
2866 /* Return 1 if X contains a thread-local symbol. */
2868 bool
2869 rs6000_tls_referenced_p (x)
2870 rtx x;
2872 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2875 /* Return 1 if *X is a thread-local symbol. This is the same as
2876 rs6000_tls_symbol_ref except for the type of the unused argument. */
2878 static inline int
2879 rs6000_tls_symbol_ref_1 (x, data)
2880 rtx *x;
2881 void *data ATTRIBUTE_UNUSED;
2883 return RS6000_SYMBOL_REF_TLS_P (*x);
2886 /* The convention appears to be to define this wherever it is used.
2887 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2888 is now used here. */
2889 #ifndef REG_MODE_OK_FOR_BASE_P
2890 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2891 #endif
2893 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2894 replace the input X, or the original X if no replacement is called for.
2895 The output parameter *WIN is 1 if the calling macro should goto WIN,
2896 0 if it should not.
2898 For RS/6000, we wish to handle large displacements off a base
2899 register by splitting the addend across an addiu/addis and the mem insn.
2900 This cuts number of extra insns needed from 3 to 1.
2902 On Darwin, we use this to generate code for floating point constants.
2903 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2904 The Darwin code is inside #if TARGET_MACHO because only then is
2905 machopic_function_base_name() defined. */
2907 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2908 rtx x;
2909 enum machine_mode mode;
2910 int opnum;
2911 int type;
2912 int ind_levels ATTRIBUTE_UNUSED;
2913 int *win;
2915 /* We must recognize output that we have already generated ourselves. */
2916 if (GET_CODE (x) == PLUS
2917 && GET_CODE (XEXP (x, 0)) == PLUS
2918 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2919 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2920 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2922 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2923 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2924 opnum, (enum reload_type)type);
2925 *win = 1;
2926 return x;
2929 #if TARGET_MACHO
2930 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2931 && GET_CODE (x) == LO_SUM
2932 && GET_CODE (XEXP (x, 0)) == PLUS
2933 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2934 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2935 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2936 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2937 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2938 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2939 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2941 /* Result of previous invocation of this function on Darwin
2942 floating point constant. */
2943 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2944 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2945 opnum, (enum reload_type)type);
2946 *win = 1;
2947 return x;
2949 #endif
2950 if (GET_CODE (x) == PLUS
2951 && GET_CODE (XEXP (x, 0)) == REG
2952 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2953 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2954 && GET_CODE (XEXP (x, 1)) == CONST_INT
2955 && !SPE_VECTOR_MODE (mode)
2956 && !ALTIVEC_VECTOR_MODE (mode))
2958 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2959 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2960 HOST_WIDE_INT high
2961 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2963 /* Check for 32-bit overflow. */
2964 if (high + low != val)
2966 *win = 0;
2967 return x;
2970 /* Reload the high part into a base reg; leave the low part
2971 in the mem directly. */
2973 x = gen_rtx_PLUS (GET_MODE (x),
2974 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2975 GEN_INT (high)),
2976 GEN_INT (low));
2978 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2979 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2980 opnum, (enum reload_type)type);
2981 *win = 1;
2982 return x;
2984 #if TARGET_MACHO
2985 if (GET_CODE (x) == SYMBOL_REF
2986 && DEFAULT_ABI == ABI_DARWIN
2987 && !ALTIVEC_VECTOR_MODE (mode)
2988 && flag_pic)
2990 /* Darwin load of floating point constant. */
2991 rtx offset = gen_rtx (CONST, Pmode,
2992 gen_rtx (MINUS, Pmode, x,
2993 gen_rtx (SYMBOL_REF, Pmode,
2994 machopic_function_base_name ())));
2995 x = gen_rtx (LO_SUM, GET_MODE (x),
2996 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2997 gen_rtx (HIGH, Pmode, offset)), offset);
2998 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2999 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3000 opnum, (enum reload_type)type);
3001 *win = 1;
3002 return x;
3004 if (GET_CODE (x) == SYMBOL_REF
3005 && DEFAULT_ABI == ABI_DARWIN
3006 && !ALTIVEC_VECTOR_MODE (mode)
3007 && MACHO_DYNAMIC_NO_PIC_P)
3009 /* Darwin load of floating point constant. */
3010 x = gen_rtx (LO_SUM, GET_MODE (x),
3011 gen_rtx (HIGH, Pmode, x), x);
3012 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3013 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3014 opnum, (enum reload_type)type);
3015 *win = 1;
3016 return x;
3018 #endif
3019 if (TARGET_TOC
3020 && constant_pool_expr_p (x)
3021 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3023 (x) = create_TOC_reference (x);
3024 *win = 1;
3025 return x;
3027 *win = 0;
3028 return x;
3031 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3032 that is a valid memory address for an instruction.
3033 The MODE argument is the machine mode for the MEM expression
3034 that wants to use this address.
3036 On the RS/6000, there are four valid address: a SYMBOL_REF that
3037 refers to a constant pool entry of an address (or the sum of it
3038 plus a constant), a short (16-bit signed) constant plus a register,
3039 the sum of two registers, or a register indirect, possibly with an
3040 auto-increment. For DFmode and DImode with a constant plus register,
3041 we must ensure that both words are addressable or PowerPC64 with offset
3042 word aligned.
3044 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3045 32-bit DImode, TImode), indexed addressing cannot be used because
3046 adjacent memory cells are accessed by adding word-sized offsets
3047 during assembly output. */
3049 rs6000_legitimate_address (mode, x, reg_ok_strict)
3050 enum machine_mode mode;
3051 rtx x;
3052 int reg_ok_strict;
3054 if (RS6000_SYMBOL_REF_TLS_P (x))
3055 return 0;
3056 if (legitimate_indirect_address_p (x, reg_ok_strict))
3057 return 1;
3058 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3059 && !ALTIVEC_VECTOR_MODE (mode)
3060 && !SPE_VECTOR_MODE (mode)
3061 && TARGET_UPDATE
3062 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3063 return 1;
3064 if (legitimate_small_data_p (mode, x))
3065 return 1;
3066 if (legitimate_constant_pool_address_p (x))
3067 return 1;
3068 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3069 if (! reg_ok_strict
3070 && GET_CODE (x) == PLUS
3071 && GET_CODE (XEXP (x, 0)) == REG
3072 && XEXP (x, 0) == virtual_stack_vars_rtx
3073 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3074 return 1;
3075 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3076 return 1;
3077 if (mode != TImode
3078 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3079 || TARGET_POWERPC64
3080 || (mode != DFmode && mode != TFmode))
3081 && (TARGET_POWERPC64 || mode != DImode)
3082 && legitimate_indexed_address_p (x, reg_ok_strict))
3083 return 1;
3084 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3085 return 1;
3086 return 0;
3089 /* Go to LABEL if ADDR (a legitimate address expression)
3090 has an effect that depends on the machine mode it is used for.
3092 On the RS/6000 this is true of all integral offsets (since AltiVec
3093 modes don't allow them) or is a pre-increment or decrement.
3095 ??? Except that due to conceptual problems in offsettable_address_p
3096 we can't really report the problems of integral offsets. So leave
3097 this assuming that the adjustable offset must be valid for the
3098 sub-words of a TFmode operand, which is what we had before. */
3100 bool
3101 rs6000_mode_dependent_address (addr)
3102 rtx addr;
3104 switch (GET_CODE (addr))
3106 case PLUS:
3107 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3109 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3110 return val + 12 + 0x8000 >= 0x10000;
3112 break;
3114 case LO_SUM:
3115 return true;
3117 case PRE_INC:
3118 case PRE_DEC:
3119 return TARGET_UPDATE;
3121 default:
3122 break;
3125 return false;
3128 /* Try to output insns to set TARGET equal to the constant C if it can
3129 be done in less than N insns. Do all computations in MODE.
3130 Returns the place where the output has been placed if it can be
3131 done and the insns have been emitted. If it would take more than N
3132 insns, zero is returned and no insns and emitted. */
3135 rs6000_emit_set_const (dest, mode, source, n)
3136 rtx dest, source;
3137 enum machine_mode mode;
3138 int n ATTRIBUTE_UNUSED;
3140 rtx result, insn, set;
3141 HOST_WIDE_INT c0, c1;
3143 if (mode == QImode || mode == HImode)
3145 if (dest == NULL)
3146 dest = gen_reg_rtx (mode);
3147 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3148 return dest;
3150 else if (mode == SImode)
3152 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3154 emit_insn (gen_rtx_SET (VOIDmode, result,
3155 GEN_INT (INTVAL (source)
3156 & (~ (HOST_WIDE_INT) 0xffff))));
3157 emit_insn (gen_rtx_SET (VOIDmode, dest,
3158 gen_rtx_IOR (SImode, result,
3159 GEN_INT (INTVAL (source) & 0xffff))));
3160 result = dest;
3162 else if (mode == DImode)
3164 if (GET_CODE (source) == CONST_INT)
3166 c0 = INTVAL (source);
3167 c1 = -(c0 < 0);
3169 else if (GET_CODE (source) == CONST_DOUBLE)
3171 #if HOST_BITS_PER_WIDE_INT >= 64
3172 c0 = CONST_DOUBLE_LOW (source);
3173 c1 = -(c0 < 0);
3174 #else
3175 c0 = CONST_DOUBLE_LOW (source);
3176 c1 = CONST_DOUBLE_HIGH (source);
3177 #endif
3179 else
3180 abort ();
3182 result = rs6000_emit_set_long_const (dest, c0, c1);
3184 else
3185 abort ();
3187 insn = get_last_insn ();
3188 set = single_set (insn);
3189 if (! CONSTANT_P (SET_SRC (set)))
3190 set_unique_reg_note (insn, REG_EQUAL, source);
3192 return result;
3195 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3196 fall back to a straight forward decomposition. We do this to avoid
3197 exponential run times encountered when looking for longer sequences
3198 with rs6000_emit_set_const. */
3199 static rtx
3200 rs6000_emit_set_long_const (dest, c1, c2)
3201 rtx dest;
3202 HOST_WIDE_INT c1, c2;
3204 if (!TARGET_POWERPC64)
3206 rtx operand1, operand2;
3208 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3209 DImode);
3210 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3211 DImode);
3212 emit_move_insn (operand1, GEN_INT (c1));
3213 emit_move_insn (operand2, GEN_INT (c2));
3215 else
3217 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3219 ud1 = c1 & 0xffff;
3220 ud2 = (c1 & 0xffff0000) >> 16;
3221 #if HOST_BITS_PER_WIDE_INT >= 64
3222 c2 = c1 >> 32;
3223 #endif
3224 ud3 = c2 & 0xffff;
3225 ud4 = (c2 & 0xffff0000) >> 16;
3227 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3228 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3230 if (ud1 & 0x8000)
3231 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3232 else
3233 emit_move_insn (dest, GEN_INT (ud1));
3236 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3237 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3239 if (ud2 & 0x8000)
3240 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3241 - 0x80000000));
3242 else
3243 emit_move_insn (dest, GEN_INT (ud2 << 16));
3244 if (ud1 != 0)
3245 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3247 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3248 || (ud4 == 0 && ! (ud3 & 0x8000)))
3250 if (ud3 & 0x8000)
3251 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3252 - 0x80000000));
3253 else
3254 emit_move_insn (dest, GEN_INT (ud3 << 16));
3256 if (ud2 != 0)
3257 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3258 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3259 if (ud1 != 0)
3260 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3262 else
3264 if (ud4 & 0x8000)
3265 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3266 - 0x80000000));
3267 else
3268 emit_move_insn (dest, GEN_INT (ud4 << 16));
3270 if (ud3 != 0)
3271 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3273 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3274 if (ud2 != 0)
3275 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3276 GEN_INT (ud2 << 16)));
3277 if (ud1 != 0)
3278 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3281 return dest;
3284 /* Emit a move from SOURCE to DEST in mode MODE. */
3285 void
3286 rs6000_emit_move (dest, source, mode)
3287 rtx dest;
3288 rtx source;
3289 enum machine_mode mode;
3291 rtx operands[2];
3292 operands[0] = dest;
3293 operands[1] = source;
3295 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3296 if (GET_CODE (operands[1]) == CONST_DOUBLE
3297 && ! FLOAT_MODE_P (mode)
3298 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3300 /* FIXME. This should never happen. */
3301 /* Since it seems that it does, do the safe thing and convert
3302 to a CONST_INT. */
3303 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3305 if (GET_CODE (operands[1]) == CONST_DOUBLE
3306 && ! FLOAT_MODE_P (mode)
3307 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3308 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3309 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3310 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3311 abort ();
3313 /* Check if GCC is setting up a block move that will end up using FP
3314 registers as temporaries. We must make sure this is acceptable. */
3315 if (GET_CODE (operands[0]) == MEM
3316 && GET_CODE (operands[1]) == MEM
3317 && mode == DImode
3318 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3319 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3320 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3321 ? 32 : MEM_ALIGN (operands[0])))
3322 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3323 ? 32
3324 : MEM_ALIGN (operands[1]))))
3325 && ! MEM_VOLATILE_P (operands [0])
3326 && ! MEM_VOLATILE_P (operands [1]))
3328 emit_move_insn (adjust_address (operands[0], SImode, 0),
3329 adjust_address (operands[1], SImode, 0));
3330 emit_move_insn (adjust_address (operands[0], SImode, 4),
3331 adjust_address (operands[1], SImode, 4));
3332 return;
3335 if (!no_new_pseudos)
3337 if (GET_CODE (operands[1]) == MEM && optimize > 0
3338 && (mode == QImode || mode == HImode || mode == SImode)
3339 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3341 rtx reg = gen_reg_rtx (word_mode);
3343 emit_insn (gen_rtx_SET (word_mode, reg,
3344 gen_rtx_ZERO_EXTEND (word_mode,
3345 operands[1])));
3346 operands[1] = gen_lowpart (mode, reg);
3348 if (GET_CODE (operands[0]) != REG)
3349 operands[1] = force_reg (mode, operands[1]);
3352 if (mode == SFmode && ! TARGET_POWERPC
3353 && TARGET_HARD_FLOAT && TARGET_FPRS
3354 && GET_CODE (operands[0]) == MEM)
3356 int regnum;
3358 if (reload_in_progress || reload_completed)
3359 regnum = true_regnum (operands[1]);
3360 else if (GET_CODE (operands[1]) == REG)
3361 regnum = REGNO (operands[1]);
3362 else
3363 regnum = -1;
3365 /* If operands[1] is a register, on POWER it may have
3366 double-precision data in it, so truncate it to single
3367 precision. */
3368 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3370 rtx newreg;
3371 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3372 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3373 operands[1] = newreg;
3377 /* Recognize the case where operand[1] is a reference to thread-local
3378 data and load its address to a register. */
3379 if (GET_CODE (operands[1]) == SYMBOL_REF)
3381 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3382 if (model != 0)
3383 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3386 /* Handle the case where reload calls us with an invalid address. */
3387 if (reload_in_progress && mode == Pmode
3388 && (! general_operand (operands[1], mode)
3389 || ! nonimmediate_operand (operands[0], mode)))
3390 goto emit_set;
3392 /* Handle the case of CONSTANT_P_RTX. */
3393 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3394 goto emit_set;
3396 /* FIXME: In the long term, this switch statement should go away
3397 and be replaced by a sequence of tests based on things like
3398 mode == Pmode. */
3399 switch (mode)
3401 case HImode:
3402 case QImode:
3403 if (CONSTANT_P (operands[1])
3404 && GET_CODE (operands[1]) != CONST_INT)
3405 operands[1] = force_const_mem (mode, operands[1]);
3406 break;
3408 case TFmode:
3409 case DFmode:
3410 case SFmode:
3411 if (CONSTANT_P (operands[1])
3412 && ! easy_fp_constant (operands[1], mode))
3413 operands[1] = force_const_mem (mode, operands[1]);
3414 break;
3416 case V16QImode:
3417 case V8HImode:
3418 case V4SFmode:
3419 case V4SImode:
3420 case V4HImode:
3421 case V2SFmode:
3422 case V2SImode:
3423 case V1DImode:
3424 if (CONSTANT_P (operands[1])
3425 && !easy_vector_constant (operands[1], mode))
3426 operands[1] = force_const_mem (mode, operands[1]);
3427 break;
3429 case SImode:
3430 case DImode:
3431 /* Use default pattern for address of ELF small data */
3432 if (TARGET_ELF
3433 && mode == Pmode
3434 && DEFAULT_ABI == ABI_V4
3435 && (GET_CODE (operands[1]) == SYMBOL_REF
3436 || GET_CODE (operands[1]) == CONST)
3437 && small_data_operand (operands[1], mode))
3439 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3440 return;
3443 if (DEFAULT_ABI == ABI_V4
3444 && mode == Pmode && mode == SImode
3445 && flag_pic == 1 && got_operand (operands[1], mode))
3447 emit_insn (gen_movsi_got (operands[0], operands[1]));
3448 return;
3451 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3452 && TARGET_NO_TOC
3453 && ! flag_pic
3454 && mode == Pmode
3455 && CONSTANT_P (operands[1])
3456 && GET_CODE (operands[1]) != HIGH
3457 && GET_CODE (operands[1]) != CONST_INT)
3459 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3461 /* If this is a function address on -mcall-aixdesc,
3462 convert it to the address of the descriptor. */
3463 if (DEFAULT_ABI == ABI_AIX
3464 && GET_CODE (operands[1]) == SYMBOL_REF
3465 && XSTR (operands[1], 0)[0] == '.')
3467 const char *name = XSTR (operands[1], 0);
3468 rtx new_ref;
3469 while (*name == '.')
3470 name++;
3471 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3472 CONSTANT_POOL_ADDRESS_P (new_ref)
3473 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3474 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3475 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3476 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3477 operands[1] = new_ref;
3480 if (DEFAULT_ABI == ABI_DARWIN)
3482 #if TARGET_MACHO
3483 if (MACHO_DYNAMIC_NO_PIC_P)
3485 /* Take care of any required data indirection. */
3486 operands[1] = rs6000_machopic_legitimize_pic_address (
3487 operands[1], mode, operands[0]);
3488 if (operands[0] != operands[1])
3489 emit_insn (gen_rtx_SET (VOIDmode,
3490 operands[0], operands[1]));
3491 return;
3493 #endif
3494 emit_insn (gen_macho_high (target, operands[1]));
3495 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3496 return;
3499 emit_insn (gen_elf_high (target, operands[1]));
3500 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3501 return;
3504 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3505 and we have put it in the TOC, we just need to make a TOC-relative
3506 reference to it. */
3507 if (TARGET_TOC
3508 && GET_CODE (operands[1]) == SYMBOL_REF
3509 && constant_pool_expr_p (operands[1])
3510 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3511 get_pool_mode (operands[1])))
3513 operands[1] = create_TOC_reference (operands[1]);
3515 else if (mode == Pmode
3516 && CONSTANT_P (operands[1])
3517 && ((GET_CODE (operands[1]) != CONST_INT
3518 && ! easy_fp_constant (operands[1], mode))
3519 || (GET_CODE (operands[1]) == CONST_INT
3520 && num_insns_constant (operands[1], mode) > 2)
3521 || (GET_CODE (operands[0]) == REG
3522 && FP_REGNO_P (REGNO (operands[0]))))
3523 && GET_CODE (operands[1]) != HIGH
3524 && ! legitimate_constant_pool_address_p (operands[1])
3525 && ! toc_relative_expr_p (operands[1]))
3527 /* Emit a USE operation so that the constant isn't deleted if
3528 expensive optimizations are turned on because nobody
3529 references it. This should only be done for operands that
3530 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3531 This should not be done for operands that contain LABEL_REFs.
3532 For now, we just handle the obvious case. */
3533 if (GET_CODE (operands[1]) != LABEL_REF)
3534 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3536 #if TARGET_MACHO
3537 /* Darwin uses a special PIC legitimizer. */
3538 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3540 operands[1] =
3541 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3542 operands[0]);
3543 if (operands[0] != operands[1])
3544 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3545 return;
3547 #endif
3549 /* If we are to limit the number of things we put in the TOC and
3550 this is a symbol plus a constant we can add in one insn,
3551 just put the symbol in the TOC and add the constant. Don't do
3552 this if reload is in progress. */
3553 if (GET_CODE (operands[1]) == CONST
3554 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3555 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3556 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3557 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3558 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3559 && ! side_effects_p (operands[0]))
3561 rtx sym =
3562 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3563 rtx other = XEXP (XEXP (operands[1], 0), 1);
3565 sym = force_reg (mode, sym);
3566 if (mode == SImode)
3567 emit_insn (gen_addsi3 (operands[0], sym, other));
3568 else
3569 emit_insn (gen_adddi3 (operands[0], sym, other));
3570 return;
3573 operands[1] = force_const_mem (mode, operands[1]);
3575 if (TARGET_TOC
3576 && constant_pool_expr_p (XEXP (operands[1], 0))
3577 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3578 get_pool_constant (XEXP (operands[1], 0)),
3579 get_pool_mode (XEXP (operands[1], 0))))
3581 operands[1]
3582 = gen_rtx_MEM (mode,
3583 create_TOC_reference (XEXP (operands[1], 0)));
3584 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3585 RTX_UNCHANGING_P (operands[1]) = 1;
3588 break;
3590 case TImode:
3591 if (GET_CODE (operands[0]) == MEM
3592 && GET_CODE (XEXP (operands[0], 0)) != REG
3593 && ! reload_in_progress)
3594 operands[0]
3595 = replace_equiv_address (operands[0],
3596 copy_addr_to_reg (XEXP (operands[0], 0)));
3598 if (GET_CODE (operands[1]) == MEM
3599 && GET_CODE (XEXP (operands[1], 0)) != REG
3600 && ! reload_in_progress)
3601 operands[1]
3602 = replace_equiv_address (operands[1],
3603 copy_addr_to_reg (XEXP (operands[1], 0)));
3604 if (TARGET_POWER)
3606 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3607 gen_rtvec (2,
3608 gen_rtx_SET (VOIDmode,
3609 operands[0], operands[1]),
3610 gen_rtx_CLOBBER (VOIDmode,
3611 gen_rtx_SCRATCH (SImode)))));
3612 return;
3614 break;
3616 default:
3617 abort ();
3620 /* Above, we may have called force_const_mem which may have returned
3621 an invalid address. If we can, fix this up; otherwise, reload will
3622 have to deal with it. */
3623 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3624 operands[1] = validize_mem (operands[1]);
3626 emit_set:
3627 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3630 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3631 for a call to a function whose data type is FNTYPE.
3632 For a library call, FNTYPE is 0.
3634 For incoming args we set the number of arguments in the prototype large
3635 so we never return a PARALLEL. */
3637 void
3638 init_cumulative_args (cum, fntype, libname, incoming)
3639 CUMULATIVE_ARGS *cum;
3640 tree fntype;
3641 rtx libname ATTRIBUTE_UNUSED;
3642 int incoming;
3644 static CUMULATIVE_ARGS zero_cumulative;
3646 *cum = zero_cumulative;
3647 cum->words = 0;
3648 cum->fregno = FP_ARG_MIN_REG;
3649 cum->vregno = ALTIVEC_ARG_MIN_REG;
3650 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3651 cum->call_cookie = CALL_NORMAL;
3652 cum->sysv_gregno = GP_ARG_MIN_REG;
3654 if (incoming)
3655 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3657 else if (cum->prototype)
3658 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3659 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3660 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3662 else
3663 cum->nargs_prototype = 0;
3665 cum->orig_nargs = cum->nargs_prototype;
3667 /* Check for a longcall attribute. */
3668 if (fntype
3669 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3670 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3671 cum->call_cookie = CALL_LONG;
3673 if (TARGET_DEBUG_ARG)
3675 fprintf (stderr, "\ninit_cumulative_args:");
3676 if (fntype)
3678 tree ret_type = TREE_TYPE (fntype);
3679 fprintf (stderr, " ret code = %s,",
3680 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3683 if (cum->call_cookie & CALL_LONG)
3684 fprintf (stderr, " longcall,");
3686 fprintf (stderr, " proto = %d, nargs = %d\n",
3687 cum->prototype, cum->nargs_prototype);
3691 /* If defined, a C expression which determines whether, and in which
3692 direction, to pad out an argument with extra space. The value
3693 should be of type `enum direction': either `upward' to pad above
3694 the argument, `downward' to pad below, or `none' to inhibit
3695 padding.
3697 For the AIX ABI structs are always stored left shifted in their
3698 argument slot. */
3700 enum direction
3701 function_arg_padding (mode, type)
3702 enum machine_mode mode;
3703 tree type;
3705 if (type != 0 && AGGREGATE_TYPE_P (type))
3706 return upward;
3708 /* This is the default definition. */
3709 return (! BYTES_BIG_ENDIAN
3710 ? upward
3711 : ((mode == BLKmode
3712 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3713 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3714 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3715 ? downward : upward));
3718 /* If defined, a C expression that gives the alignment boundary, in bits,
3719 of an argument with the specified mode and type. If it is not defined,
3720 PARM_BOUNDARY is used for all arguments.
3722 V.4 wants long longs to be double word aligned. */
3725 function_arg_boundary (mode, type)
3726 enum machine_mode mode;
3727 tree type ATTRIBUTE_UNUSED;
3729 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3730 return 64;
3731 else if (SPE_VECTOR_MODE (mode))
3732 return 64;
3733 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3734 return 128;
3735 else
3736 return PARM_BOUNDARY;
3739 /* Update the data in CUM to advance over an argument
3740 of mode MODE and data type TYPE.
3741 (TYPE is null for libcalls where that information may not be available.) */
3743 void
3744 function_arg_advance (cum, mode, type, named)
3745 CUMULATIVE_ARGS *cum;
3746 enum machine_mode mode;
3747 tree type;
3748 int named;
3750 cum->nargs_prototype--;
3752 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3754 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3755 cum->vregno++;
3756 else
3757 cum->words += RS6000_ARG_SIZE (mode, type);
3759 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3760 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3761 cum->sysv_gregno++;
3762 else if (DEFAULT_ABI == ABI_V4)
3764 if (TARGET_HARD_FLOAT && TARGET_FPRS
3765 && (mode == SFmode || mode == DFmode))
3767 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3768 cum->fregno++;
3769 else
3771 if (mode == DFmode)
3772 cum->words += cum->words & 1;
3773 cum->words += RS6000_ARG_SIZE (mode, type);
3776 else
3778 int n_words;
3779 int gregno = cum->sysv_gregno;
3781 /* Aggregates and IEEE quad get passed by reference. */
3782 if ((type && AGGREGATE_TYPE_P (type))
3783 || mode == TFmode)
3784 n_words = 1;
3785 else
3786 n_words = RS6000_ARG_SIZE (mode, type);
3788 /* Long long and SPE vectors are put in odd registers. */
3789 if (n_words == 2 && (gregno & 1) == 0)
3790 gregno += 1;
3792 /* Long long and SPE vectors are not split between registers
3793 and stack. */
3794 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3796 /* Long long is aligned on the stack. */
3797 if (n_words == 2)
3798 cum->words += cum->words & 1;
3799 cum->words += n_words;
3802 /* Note: continuing to accumulate gregno past when we've started
3803 spilling to the stack indicates the fact that we've started
3804 spilling to the stack to expand_builtin_saveregs. */
3805 cum->sysv_gregno = gregno + n_words;
3808 if (TARGET_DEBUG_ARG)
3810 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3811 cum->words, cum->fregno);
3812 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3813 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3814 fprintf (stderr, "mode = %4s, named = %d\n",
3815 GET_MODE_NAME (mode), named);
3818 else
3820 int align = (TARGET_32BIT && (cum->words & 1) != 0
3821 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3823 cum->words += align + RS6000_ARG_SIZE (mode, type);
3825 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3826 && TARGET_HARD_FLOAT && TARGET_FPRS)
3827 cum->fregno += (mode == TFmode ? 2 : 1);
3829 if (TARGET_DEBUG_ARG)
3831 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3832 cum->words, cum->fregno);
3833 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3834 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3835 fprintf (stderr, "named = %d, align = %d\n", named, align);
3840 /* Determine where to put an argument to a function.
3841 Value is zero to push the argument on the stack,
3842 or a hard register in which to store the argument.
3844 MODE is the argument's machine mode.
3845 TYPE is the data type of the argument (as a tree).
3846 This is null for libcalls where that information may
3847 not be available.
3848 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3849 the preceding args and about the function being called.
3850 NAMED is nonzero if this argument is a named parameter
3851 (otherwise it is an extra parameter matching an ellipsis).
3853 On RS/6000 the first eight words of non-FP are normally in registers
3854 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3855 Under V.4, the first 8 FP args are in registers.
3857 If this is floating-point and no prototype is specified, we use
3858 both an FP and integer register (or possibly FP reg and stack). Library
3859 functions (when TYPE is zero) always have the proper types for args,
3860 so we can pass the FP value just in one register. emit_library_function
3861 doesn't support PARALLEL anyway. */
3863 struct rtx_def *
3864 function_arg (cum, mode, type, named)
3865 CUMULATIVE_ARGS *cum;
3866 enum machine_mode mode;
3867 tree type;
3868 int named;
3870 enum rs6000_abi abi = DEFAULT_ABI;
3872 /* Return a marker to indicate whether CR1 needs to set or clear the
3873 bit that V.4 uses to say fp args were passed in registers.
3874 Assume that we don't need the marker for software floating point,
3875 or compiler generated library calls. */
3876 if (mode == VOIDmode)
3878 if (abi == ABI_V4
3879 && cum->nargs_prototype < 0
3880 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3882 /* For the SPE, we need to crxor CR6 always. */
3883 if (TARGET_SPE_ABI)
3884 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3885 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3886 return GEN_INT (cum->call_cookie
3887 | ((cum->fregno == FP_ARG_MIN_REG)
3888 ? CALL_V4_SET_FP_ARGS
3889 : CALL_V4_CLEAR_FP_ARGS));
3892 return GEN_INT (cum->call_cookie);
3895 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3897 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3898 return gen_rtx_REG (mode, cum->vregno);
3899 else
3900 return NULL;
3902 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3904 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3905 return gen_rtx_REG (mode, cum->sysv_gregno);
3906 else
3907 return NULL;
3909 else if (abi == ABI_V4)
3911 if (TARGET_HARD_FLOAT && TARGET_FPRS
3912 && (mode == SFmode || mode == DFmode))
3914 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3915 return gen_rtx_REG (mode, cum->fregno);
3916 else
3917 return NULL;
3919 else
3921 int n_words;
3922 int gregno = cum->sysv_gregno;
3924 /* Aggregates and IEEE quad get passed by reference. */
3925 if ((type && AGGREGATE_TYPE_P (type))
3926 || mode == TFmode)
3927 n_words = 1;
3928 else
3929 n_words = RS6000_ARG_SIZE (mode, type);
3931 /* Long long and SPE vectors are put in odd registers. */
3932 if (n_words == 2 && (gregno & 1) == 0)
3933 gregno += 1;
3935 /* Long long and SPE vectors are not split between registers
3936 and stack. */
3937 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3939 /* SPE vectors in ... get split into 2 registers. */
3940 if (TARGET_SPE && TARGET_SPE_ABI
3941 && SPE_VECTOR_MODE (mode) && !named)
3943 rtx r1, r2;
3944 enum machine_mode m = SImode;
3946 r1 = gen_rtx_REG (m, gregno);
3947 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3948 r2 = gen_rtx_REG (m, gregno + 1);
3949 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3950 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3952 return gen_rtx_REG (mode, gregno);
3954 else
3955 return NULL;
3958 else
3960 int align = (TARGET_32BIT && (cum->words & 1) != 0
3961 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3962 int align_words = cum->words + align;
3964 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3965 return NULL_RTX;
3967 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3969 if (! type
3970 || ((cum->nargs_prototype > 0)
3971 /* IBM AIX extended its linkage convention definition always
3972 to require FP args after register save area hole on the
3973 stack. */
3974 && (DEFAULT_ABI != ABI_AIX
3975 || ! TARGET_XL_CALL
3976 || (align_words < GP_ARG_NUM_REG))))
3977 return gen_rtx_REG (mode, cum->fregno);
3979 return gen_rtx_PARALLEL (mode,
3980 gen_rtvec (2,
3981 gen_rtx_EXPR_LIST (VOIDmode,
3982 ((align_words >= GP_ARG_NUM_REG)
3983 ? NULL_RTX
3984 : (align_words
3985 + RS6000_ARG_SIZE (mode, type)
3986 > GP_ARG_NUM_REG
3987 /* If this is partially on the stack, then
3988 we only include the portion actually
3989 in registers here. */
3990 ? gen_rtx_REG (SImode,
3991 GP_ARG_MIN_REG + align_words)
3992 : gen_rtx_REG (mode,
3993 GP_ARG_MIN_REG + align_words))),
3994 const0_rtx),
3995 gen_rtx_EXPR_LIST (VOIDmode,
3996 gen_rtx_REG (mode, cum->fregno),
3997 const0_rtx)));
3999 else if (align_words < GP_ARG_NUM_REG)
4000 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4001 else
4002 return NULL_RTX;
4006 /* For an arg passed partly in registers and partly in memory,
4007 this is the number of registers used.
4008 For args passed entirely in registers or entirely in memory, zero. */
4011 function_arg_partial_nregs (cum, mode, type, named)
4012 CUMULATIVE_ARGS *cum;
4013 enum machine_mode mode;
4014 tree type;
4015 int named ATTRIBUTE_UNUSED;
4017 if (DEFAULT_ABI == ABI_V4)
4018 return 0;
4020 if (USE_FP_FOR_ARG_P (*cum, mode, type)
4021 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
4023 if (cum->nargs_prototype >= 0)
4024 return 0;
4027 if (cum->words < GP_ARG_NUM_REG
4028 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4030 int ret = GP_ARG_NUM_REG - cum->words;
4031 if (ret && TARGET_DEBUG_ARG)
4032 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4034 return ret;
4037 return 0;
4040 /* A C expression that indicates when an argument must be passed by
4041 reference. If nonzero for an argument, a copy of that argument is
4042 made in memory and a pointer to the argument is passed instead of
4043 the argument itself. The pointer is passed in whatever way is
4044 appropriate for passing a pointer to that type.
4046 Under V.4, structures and unions are passed by reference.
4048 As an extension to all ABIs, variable sized types are passed by
4049 reference. */
4052 function_arg_pass_by_reference (cum, mode, type, named)
4053 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4054 enum machine_mode mode ATTRIBUTE_UNUSED;
4055 tree type;
4056 int named ATTRIBUTE_UNUSED;
4058 if (DEFAULT_ABI == ABI_V4
4059 && ((type && AGGREGATE_TYPE_P (type))
4060 || mode == TFmode))
4062 if (TARGET_DEBUG_ARG)
4063 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4065 return 1;
4067 return type && int_size_in_bytes (type) <= 0;
4070 /* Perform any needed actions needed for a function that is receiving a
4071 variable number of arguments.
4073 CUM is as above.
4075 MODE and TYPE are the mode and type of the current parameter.
4077 PRETEND_SIZE is a variable that should be set to the amount of stack
4078 that must be pushed by the prolog to pretend that our caller pushed
4081 Normally, this macro will push all remaining incoming registers on the
4082 stack and set PRETEND_SIZE to the length of the registers pushed. */
4084 void
4085 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4086 CUMULATIVE_ARGS *cum;
4087 enum machine_mode mode;
4088 tree type;
4089 int *pretend_size ATTRIBUTE_UNUSED;
4090 int no_rtl;
4093 CUMULATIVE_ARGS next_cum;
4094 int reg_size = TARGET_32BIT ? 4 : 8;
4095 rtx save_area = NULL_RTX, mem;
4096 int first_reg_offset, set;
4097 tree fntype;
4098 int stdarg_p;
4100 fntype = TREE_TYPE (current_function_decl);
4101 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4102 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4103 != void_type_node));
4105 /* For varargs, we do not want to skip the dummy va_dcl argument.
4106 For stdargs, we do want to skip the last named argument. */
4107 next_cum = *cum;
4108 if (stdarg_p)
4109 function_arg_advance (&next_cum, mode, type, 1);
4111 if (DEFAULT_ABI == ABI_V4)
4113 /* Indicate to allocate space on the stack for varargs save area. */
4114 cfun->machine->sysv_varargs_p = 1;
4115 if (! no_rtl)
4116 save_area = plus_constant (virtual_stack_vars_rtx,
4117 - RS6000_VARARGS_SIZE);
4119 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4121 else
4123 first_reg_offset = next_cum.words;
4124 save_area = virtual_incoming_args_rtx;
4125 cfun->machine->sysv_varargs_p = 0;
4127 if (MUST_PASS_IN_STACK (mode, type))
4128 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4131 set = get_varargs_alias_set ();
4132 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4134 mem = gen_rtx_MEM (BLKmode,
4135 plus_constant (save_area,
4136 first_reg_offset * reg_size)),
4137 set_mem_alias_set (mem, set);
4138 set_mem_align (mem, BITS_PER_WORD);
4140 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4141 GP_ARG_NUM_REG - first_reg_offset);
4144 /* Save FP registers if needed. */
4145 if (DEFAULT_ABI == ABI_V4
4146 && TARGET_HARD_FLOAT && TARGET_FPRS
4147 && ! no_rtl
4148 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4150 int fregno = next_cum.fregno;
4151 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4152 rtx lab = gen_label_rtx ();
4153 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4155 emit_jump_insn (gen_rtx_SET (VOIDmode,
4156 pc_rtx,
4157 gen_rtx_IF_THEN_ELSE (VOIDmode,
4158 gen_rtx_NE (VOIDmode, cr1,
4159 const0_rtx),
4160 gen_rtx_LABEL_REF (VOIDmode, lab),
4161 pc_rtx)));
4163 while (fregno <= FP_ARG_V4_MAX_REG)
4165 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4166 set_mem_alias_set (mem, set);
4167 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4168 fregno++;
4169 off += 8;
4172 emit_label (lab);
4176 /* Create the va_list data type. */
4178 tree
4179 rs6000_build_va_list ()
4181 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
4183 /* For AIX, prefer 'char *' because that's what the system
4184 header files like. */
4185 if (DEFAULT_ABI != ABI_V4)
4186 return build_pointer_type (char_type_node);
4188 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4189 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4191 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4192 unsigned_char_type_node);
4193 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4194 unsigned_char_type_node);
4195 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4196 ptr_type_node);
4197 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4198 ptr_type_node);
4200 DECL_FIELD_CONTEXT (f_gpr) = record;
4201 DECL_FIELD_CONTEXT (f_fpr) = record;
4202 DECL_FIELD_CONTEXT (f_ovf) = record;
4203 DECL_FIELD_CONTEXT (f_sav) = record;
4205 TREE_CHAIN (record) = type_decl;
4206 TYPE_NAME (record) = type_decl;
4207 TYPE_FIELDS (record) = f_gpr;
4208 TREE_CHAIN (f_gpr) = f_fpr;
4209 TREE_CHAIN (f_fpr) = f_ovf;
4210 TREE_CHAIN (f_ovf) = f_sav;
4212 layout_type (record);
4214 /* The correct type is an array type of one element. */
4215 return build_array_type (record, build_index_type (size_zero_node));
4218 /* Implement va_start. */
4220 void
4221 rs6000_va_start (valist, nextarg)
4222 tree valist;
4223 rtx nextarg;
4225 HOST_WIDE_INT words, n_gpr, n_fpr;
4226 tree f_gpr, f_fpr, f_ovf, f_sav;
4227 tree gpr, fpr, ovf, sav, t;
4229 /* Only SVR4 needs something special. */
4230 if (DEFAULT_ABI != ABI_V4)
4232 std_expand_builtin_va_start (valist, nextarg);
4233 return;
4236 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4237 f_fpr = TREE_CHAIN (f_gpr);
4238 f_ovf = TREE_CHAIN (f_fpr);
4239 f_sav = TREE_CHAIN (f_ovf);
4241 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4242 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4243 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4244 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4245 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4247 /* Count number of gp and fp argument registers used. */
4248 words = current_function_args_info.words;
4249 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4250 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4252 if (TARGET_DEBUG_ARG)
4253 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4254 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4255 words, n_gpr, n_fpr);
4257 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4258 TREE_SIDE_EFFECTS (t) = 1;
4259 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4261 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4262 TREE_SIDE_EFFECTS (t) = 1;
4263 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4265 /* Find the overflow area. */
4266 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4267 if (words != 0)
4268 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4269 build_int_2 (words * UNITS_PER_WORD, 0));
4270 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4271 TREE_SIDE_EFFECTS (t) = 1;
4272 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4274 /* Find the register save area. */
4275 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4276 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4277 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4278 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4279 TREE_SIDE_EFFECTS (t) = 1;
4280 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4283 /* Implement va_arg. */
4286 rs6000_va_arg (valist, type)
4287 tree valist, type;
4289 tree f_gpr, f_fpr, f_ovf, f_sav;
4290 tree gpr, fpr, ovf, sav, reg, t, u;
4291 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4292 rtx lab_false, lab_over, addr_rtx, r;
4294 if (DEFAULT_ABI != ABI_V4)
4296 /* Variable sized types are passed by reference. */
4297 if (int_size_in_bytes (type) <= 0)
4299 u = build_pointer_type (type);
4301 /* Args grow upward. */
4302 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4303 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4304 TREE_SIDE_EFFECTS (t) = 1;
4306 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4307 TREE_SIDE_EFFECTS (t) = 1;
4309 t = build1 (INDIRECT_REF, u, t);
4310 TREE_SIDE_EFFECTS (t) = 1;
4312 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4314 else
4315 return std_expand_builtin_va_arg (valist, type);
4318 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4319 f_fpr = TREE_CHAIN (f_gpr);
4320 f_ovf = TREE_CHAIN (f_fpr);
4321 f_sav = TREE_CHAIN (f_ovf);
4323 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4324 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4325 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4326 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4327 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4329 size = int_size_in_bytes (type);
4330 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4332 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4334 /* Aggregates and long doubles are passed by reference. */
4335 indirect_p = 1;
4336 reg = gpr;
4337 n_reg = 1;
4338 sav_ofs = 0;
4339 sav_scale = 4;
4340 size = UNITS_PER_WORD;
4341 rsize = 1;
4343 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4345 /* FP args go in FP registers, if present. */
4346 indirect_p = 0;
4347 reg = fpr;
4348 n_reg = 1;
4349 sav_ofs = 8*4;
4350 sav_scale = 8;
4352 else
4354 /* Otherwise into GP registers. */
4355 indirect_p = 0;
4356 reg = gpr;
4357 n_reg = rsize;
4358 sav_ofs = 0;
4359 sav_scale = 4;
4362 /* Pull the value out of the saved registers ... */
4364 lab_false = gen_label_rtx ();
4365 lab_over = gen_label_rtx ();
4366 addr_rtx = gen_reg_rtx (Pmode);
4368 /* AltiVec vectors never go in registers. */
4369 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4371 TREE_THIS_VOLATILE (reg) = 1;
4372 emit_cmp_and_jump_insns
4373 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4374 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4375 lab_false);
4377 /* Long long is aligned in the registers. */
4378 if (n_reg > 1)
4380 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4381 build_int_2 (n_reg - 1, 0));
4382 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4383 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4384 TREE_SIDE_EFFECTS (u) = 1;
4385 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4388 if (sav_ofs)
4389 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4390 else
4391 t = sav;
4393 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4394 build_int_2 (n_reg, 0));
4395 TREE_SIDE_EFFECTS (u) = 1;
4397 u = build1 (CONVERT_EXPR, integer_type_node, u);
4398 TREE_SIDE_EFFECTS (u) = 1;
4400 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4401 TREE_SIDE_EFFECTS (u) = 1;
4403 t = build (PLUS_EXPR, ptr_type_node, t, u);
4404 TREE_SIDE_EFFECTS (t) = 1;
4406 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4407 if (r != addr_rtx)
4408 emit_move_insn (addr_rtx, r);
4410 emit_jump_insn (gen_jump (lab_over));
4411 emit_barrier ();
4414 emit_label (lab_false);
4416 /* ... otherwise out of the overflow area. */
4418 /* Make sure we don't find reg 7 for the next int arg.
4420 All AltiVec vectors go in the overflow area. So in the AltiVec
4421 case we need to get the vectors from the overflow area, but
4422 remember where the GPRs and FPRs are. */
4423 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4424 || !TARGET_ALTIVEC))
4426 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4427 TREE_SIDE_EFFECTS (t) = 1;
4428 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4431 /* Care for on-stack alignment if needed. */
4432 if (rsize <= 1)
4433 t = ovf;
4434 else
4436 int align;
4438 /* AltiVec vectors are 16 byte aligned. */
4439 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4440 align = 15;
4441 else
4442 align = 7;
4444 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4445 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4447 t = save_expr (t);
4449 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4450 if (r != addr_rtx)
4451 emit_move_insn (addr_rtx, r);
4453 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4454 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4455 TREE_SIDE_EFFECTS (t) = 1;
4456 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4458 emit_label (lab_over);
4460 if (indirect_p)
4462 r = gen_rtx_MEM (Pmode, addr_rtx);
4463 set_mem_alias_set (r, get_varargs_alias_set ());
4464 emit_move_insn (addr_rtx, r);
4467 return addr_rtx;
4470 /* Builtins. */
4472 #define def_builtin(MASK, NAME, TYPE, CODE) \
4473 do { \
4474 if ((MASK) & target_flags) \
4475 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4476 NULL, NULL_TREE); \
4477 } while (0)
4479 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4481 static const struct builtin_description bdesc_3arg[] =
4483 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4484 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4485 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4486 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4487 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4488 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4489 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4490 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4491 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4492 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4493 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4494 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4495 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4496 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4497 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4498 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4499 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4500 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4501 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4502 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4503 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4504 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4505 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4508 /* DST operations: void foo (void *, const int, const char). */
4510 static const struct builtin_description bdesc_dst[] =
4512 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4513 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4514 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4515 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4518 /* Simple binary operations: VECc = foo (VECa, VECb). */
4520 static struct builtin_description bdesc_2arg[] =
4522 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4523 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4524 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4525 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4526 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4527 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4528 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4529 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4530 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4531 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4532 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4533 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4534 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4535 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4536 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4537 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4538 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4539 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4540 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4541 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4542 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4543 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4544 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4545 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4546 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4547 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4548 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4549 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4550 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4551 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4552 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4553 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4554 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4555 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4556 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4557 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4558 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4559 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4560 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4561 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4562 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4563 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4564 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4565 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4566 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4567 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4568 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4569 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4570 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4571 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4572 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4573 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4574 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4575 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4576 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4577 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4578 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4579 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4580 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4581 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4582 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4583 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4584 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4585 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4586 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4587 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4588 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4589 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4590 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4591 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4592 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4593 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4594 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4595 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4596 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4597 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4598 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4599 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4600 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4601 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4602 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4603 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4604 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4605 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4606 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4607 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4608 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4609 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4610 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4611 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4612 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4613 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4614 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4615 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4616 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4617 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4618 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4619 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4620 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4621 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4622 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4623 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4624 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4625 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4626 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4627 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4628 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4629 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4630 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4631 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4632 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4633 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4634 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4636 /* Place holder, leave as first spe builtin. */
4637 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4638 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4639 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4640 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4641 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4642 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4643 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4644 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4645 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4646 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4647 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4648 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4649 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4650 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4651 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4652 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4653 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4654 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4655 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4656 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4657 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4658 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4659 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4660 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4661 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4662 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4663 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4664 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4665 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4666 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4667 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4668 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4669 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4670 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4671 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4672 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4673 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4674 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4675 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4676 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4677 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4678 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4679 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4680 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4681 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4682 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4683 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4684 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4685 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4686 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4687 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4688 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4689 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4690 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4691 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4692 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4693 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4694 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4695 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4696 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4697 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4698 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4699 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4700 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4701 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4702 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4703 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4704 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4705 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4706 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4707 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4708 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4709 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4710 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4711 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4712 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4713 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4714 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4715 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4716 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4717 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4718 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4719 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4720 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4721 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4722 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4723 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4724 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4725 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4726 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4727 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4728 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4729 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4730 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4731 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4732 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4733 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4734 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4735 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4736 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4737 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4738 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4739 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4740 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4741 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4742 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4743 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4744 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4745 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4747 /* SPE binary operations expecting a 5-bit unsigned literal. */
4748 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4750 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4751 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4752 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4753 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4754 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4755 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4756 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4757 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4758 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4759 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4760 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4761 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4762 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4763 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4764 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4765 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4766 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4767 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4768 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4769 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4770 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4771 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4772 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4773 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4774 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4775 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4777 /* Place-holder. Leave as last binary SPE builtin. */
4778 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4781 /* AltiVec predicates. */
4783 struct builtin_description_predicates
4785 const unsigned int mask;
4786 const enum insn_code icode;
4787 const char *opcode;
4788 const char *const name;
4789 const enum rs6000_builtins code;
4792 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4794 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4795 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4796 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4797 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4798 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4799 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4800 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4801 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4802 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4803 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4804 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4805 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4806 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4809 /* SPE predicates. */
4810 static struct builtin_description bdesc_spe_predicates[] =
4812 /* Place-holder. Leave as first. */
4813 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4814 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4815 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4816 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4817 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4818 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4819 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4820 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4821 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4822 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4823 /* Place-holder. Leave as last. */
4824 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4827 /* SPE evsel predicates. */
4828 static struct builtin_description bdesc_spe_evsel[] =
4830 /* Place-holder. Leave as first. */
4831 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4832 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4833 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4834 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4835 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4836 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4837 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4838 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4839 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4840 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4841 /* Place-holder. Leave as last. */
4842 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4845 /* ABS* operations. */
4847 static const struct builtin_description bdesc_abs[] =
4849 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4850 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4851 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4852 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4853 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4854 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4855 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4858 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4859 foo (VECa). */
4861 static struct builtin_description bdesc_1arg[] =
4863 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4864 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4865 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4866 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4867 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4868 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4869 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4870 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4871 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4872 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4873 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4874 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4875 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4876 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4877 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4878 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4879 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4881 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4882 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4883 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4884 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4885 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4886 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4887 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4888 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4889 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4890 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4891 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4892 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4893 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4894 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4895 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4896 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4897 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4898 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4899 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4900 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4901 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4902 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4903 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4904 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4905 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4906 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4907 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4908 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4909 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4910 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4911 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4912 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4914 /* Place-holder. Leave as last unary SPE builtin. */
4915 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4918 static rtx
4919 rs6000_expand_unop_builtin (icode, arglist, target)
4920 enum insn_code icode;
4921 tree arglist;
4922 rtx target;
4924 rtx pat;
4925 tree arg0 = TREE_VALUE (arglist);
4926 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4927 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4928 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4930 if (icode == CODE_FOR_nothing)
4931 /* Builtin not supported on this processor. */
4932 return 0;
4934 /* If we got invalid arguments bail out before generating bad rtl. */
4935 if (arg0 == error_mark_node)
4936 return const0_rtx;
4938 if (icode == CODE_FOR_altivec_vspltisb
4939 || icode == CODE_FOR_altivec_vspltish
4940 || icode == CODE_FOR_altivec_vspltisw
4941 || icode == CODE_FOR_spe_evsplatfi
4942 || icode == CODE_FOR_spe_evsplati)
4944 /* Only allow 5-bit *signed* literals. */
4945 if (GET_CODE (op0) != CONST_INT
4946 || INTVAL (op0) > 0x1f
4947 || INTVAL (op0) < -0x1f)
4949 error ("argument 1 must be a 5-bit signed literal");
4950 return const0_rtx;
4954 if (target == 0
4955 || GET_MODE (target) != tmode
4956 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4957 target = gen_reg_rtx (tmode);
4959 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4960 op0 = copy_to_mode_reg (mode0, op0);
4962 pat = GEN_FCN (icode) (target, op0);
4963 if (! pat)
4964 return 0;
4965 emit_insn (pat);
4967 return target;
4970 static rtx
4971 altivec_expand_abs_builtin (icode, arglist, target)
4972 enum insn_code icode;
4973 tree arglist;
4974 rtx target;
4976 rtx pat, scratch1, scratch2;
4977 tree arg0 = TREE_VALUE (arglist);
4978 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4979 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4980 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4982 /* If we have invalid arguments, bail out before generating bad rtl. */
4983 if (arg0 == error_mark_node)
4984 return const0_rtx;
4986 if (target == 0
4987 || GET_MODE (target) != tmode
4988 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4989 target = gen_reg_rtx (tmode);
4991 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4992 op0 = copy_to_mode_reg (mode0, op0);
4994 scratch1 = gen_reg_rtx (mode0);
4995 scratch2 = gen_reg_rtx (mode0);
4997 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4998 if (! pat)
4999 return 0;
5000 emit_insn (pat);
5002 return target;
5005 static rtx
5006 rs6000_expand_binop_builtin (icode, arglist, target)
5007 enum insn_code icode;
5008 tree arglist;
5009 rtx target;
5011 rtx pat;
5012 tree arg0 = TREE_VALUE (arglist);
5013 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5014 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5015 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5016 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5017 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5018 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5020 if (icode == CODE_FOR_nothing)
5021 /* Builtin not supported on this processor. */
5022 return 0;
5024 /* If we got invalid arguments bail out before generating bad rtl. */
5025 if (arg0 == error_mark_node || arg1 == error_mark_node)
5026 return const0_rtx;
5028 if (icode == CODE_FOR_altivec_vcfux
5029 || icode == CODE_FOR_altivec_vcfsx
5030 || icode == CODE_FOR_altivec_vctsxs
5031 || icode == CODE_FOR_altivec_vctuxs
5032 || icode == CODE_FOR_altivec_vspltb
5033 || icode == CODE_FOR_altivec_vsplth
5034 || icode == CODE_FOR_altivec_vspltw
5035 || icode == CODE_FOR_spe_evaddiw
5036 || icode == CODE_FOR_spe_evldd
5037 || icode == CODE_FOR_spe_evldh
5038 || icode == CODE_FOR_spe_evldw
5039 || icode == CODE_FOR_spe_evlhhesplat
5040 || icode == CODE_FOR_spe_evlhhossplat
5041 || icode == CODE_FOR_spe_evlhhousplat
5042 || icode == CODE_FOR_spe_evlwhe
5043 || icode == CODE_FOR_spe_evlwhos
5044 || icode == CODE_FOR_spe_evlwhou
5045 || icode == CODE_FOR_spe_evlwhsplat
5046 || icode == CODE_FOR_spe_evlwwsplat
5047 || icode == CODE_FOR_spe_evrlwi
5048 || icode == CODE_FOR_spe_evslwi
5049 || icode == CODE_FOR_spe_evsrwis
5050 || icode == CODE_FOR_spe_evsubifw
5051 || icode == CODE_FOR_spe_evsrwiu)
5053 /* Only allow 5-bit unsigned literals. */
5054 if (TREE_CODE (arg1) != INTEGER_CST
5055 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5057 error ("argument 2 must be a 5-bit unsigned literal");
5058 return const0_rtx;
5062 if (target == 0
5063 || GET_MODE (target) != tmode
5064 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5065 target = gen_reg_rtx (tmode);
5067 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5068 op0 = copy_to_mode_reg (mode0, op0);
5069 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5070 op1 = copy_to_mode_reg (mode1, op1);
5072 pat = GEN_FCN (icode) (target, op0, op1);
5073 if (! pat)
5074 return 0;
5075 emit_insn (pat);
5077 return target;
5080 static rtx
5081 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
5082 enum insn_code icode;
5083 const char *opcode;
5084 tree arglist;
5085 rtx target;
5087 rtx pat, scratch;
5088 tree cr6_form = TREE_VALUE (arglist);
5089 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5090 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5091 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5092 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5093 enum machine_mode tmode = SImode;
5094 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5095 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5096 int cr6_form_int;
5098 if (TREE_CODE (cr6_form) != INTEGER_CST)
5100 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5101 return const0_rtx;
5103 else
5104 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5106 if (mode0 != mode1)
5107 abort ();
5109 /* If we have invalid arguments, bail out before generating bad rtl. */
5110 if (arg0 == error_mark_node || arg1 == error_mark_node)
5111 return const0_rtx;
5113 if (target == 0
5114 || GET_MODE (target) != tmode
5115 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5116 target = gen_reg_rtx (tmode);
5118 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5119 op0 = copy_to_mode_reg (mode0, op0);
5120 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5121 op1 = copy_to_mode_reg (mode1, op1);
5123 scratch = gen_reg_rtx (mode0);
5125 pat = GEN_FCN (icode) (scratch, op0, op1,
5126 gen_rtx (SYMBOL_REF, Pmode, opcode));
5127 if (! pat)
5128 return 0;
5129 emit_insn (pat);
5131 /* The vec_any* and vec_all* predicates use the same opcodes for two
5132 different operations, but the bits in CR6 will be different
5133 depending on what information we want. So we have to play tricks
5134 with CR6 to get the right bits out.
5136 If you think this is disgusting, look at the specs for the
5137 AltiVec predicates. */
5139 switch (cr6_form_int)
5141 case 0:
5142 emit_insn (gen_cr6_test_for_zero (target));
5143 break;
5144 case 1:
5145 emit_insn (gen_cr6_test_for_zero_reverse (target));
5146 break;
5147 case 2:
5148 emit_insn (gen_cr6_test_for_lt (target));
5149 break;
5150 case 3:
5151 emit_insn (gen_cr6_test_for_lt_reverse (target));
5152 break;
5153 default:
5154 error ("argument 1 of __builtin_altivec_predicate is out of range");
5155 break;
5158 return target;
5161 static rtx
5162 altivec_expand_stv_builtin (icode, arglist)
5163 enum insn_code icode;
5164 tree arglist;
5166 tree arg0 = TREE_VALUE (arglist);
5167 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5168 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5169 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5170 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5171 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5172 rtx pat;
5173 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5174 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5175 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5177 /* Invalid arguments. Bail before doing anything stoopid! */
5178 if (arg0 == error_mark_node
5179 || arg1 == error_mark_node
5180 || arg2 == error_mark_node)
5181 return const0_rtx;
5183 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5184 op0 = copy_to_mode_reg (mode2, op0);
5185 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5186 op1 = copy_to_mode_reg (mode0, op1);
5187 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5188 op2 = copy_to_mode_reg (mode1, op2);
5190 pat = GEN_FCN (icode) (op1, op2, op0);
5191 if (pat)
5192 emit_insn (pat);
5193 return NULL_RTX;
5196 static rtx
5197 rs6000_expand_ternop_builtin (icode, arglist, target)
5198 enum insn_code icode;
5199 tree arglist;
5200 rtx target;
5202 rtx pat;
5203 tree arg0 = TREE_VALUE (arglist);
5204 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5205 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5206 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5207 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5208 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5209 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5210 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5211 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5212 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5214 if (icode == CODE_FOR_nothing)
5215 /* Builtin not supported on this processor. */
5216 return 0;
5218 /* If we got invalid arguments bail out before generating bad rtl. */
5219 if (arg0 == error_mark_node
5220 || arg1 == error_mark_node
5221 || arg2 == error_mark_node)
5222 return const0_rtx;
5224 if (icode == CODE_FOR_altivec_vsldoi_4sf
5225 || icode == CODE_FOR_altivec_vsldoi_4si
5226 || icode == CODE_FOR_altivec_vsldoi_8hi
5227 || icode == CODE_FOR_altivec_vsldoi_16qi)
5229 /* Only allow 4-bit unsigned literals. */
5230 if (TREE_CODE (arg2) != INTEGER_CST
5231 || TREE_INT_CST_LOW (arg2) & ~0xf)
5233 error ("argument 3 must be a 4-bit unsigned literal");
5234 return const0_rtx;
5238 if (target == 0
5239 || GET_MODE (target) != tmode
5240 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5241 target = gen_reg_rtx (tmode);
5243 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5244 op0 = copy_to_mode_reg (mode0, op0);
5245 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5246 op1 = copy_to_mode_reg (mode1, op1);
5247 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5248 op2 = copy_to_mode_reg (mode2, op2);
5250 pat = GEN_FCN (icode) (target, op0, op1, op2);
5251 if (! pat)
5252 return 0;
5253 emit_insn (pat);
5255 return target;
5258 /* Expand the lvx builtins. */
5259 static rtx
5260 altivec_expand_ld_builtin (exp, target, expandedp)
5261 tree exp;
5262 rtx target;
5263 bool *expandedp;
5265 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5266 tree arglist = TREE_OPERAND (exp, 1);
5267 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5268 tree arg0;
5269 enum machine_mode tmode, mode0;
5270 rtx pat, op0;
5271 enum insn_code icode;
5273 switch (fcode)
5275 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5276 icode = CODE_FOR_altivec_lvx_16qi;
5277 break;
5278 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5279 icode = CODE_FOR_altivec_lvx_8hi;
5280 break;
5281 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5282 icode = CODE_FOR_altivec_lvx_4si;
5283 break;
5284 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5285 icode = CODE_FOR_altivec_lvx_4sf;
5286 break;
5287 default:
5288 *expandedp = false;
5289 return NULL_RTX;
5292 *expandedp = true;
5294 arg0 = TREE_VALUE (arglist);
5295 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5296 tmode = insn_data[icode].operand[0].mode;
5297 mode0 = insn_data[icode].operand[1].mode;
5299 if (target == 0
5300 || GET_MODE (target) != tmode
5301 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5302 target = gen_reg_rtx (tmode);
5304 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5305 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5307 pat = GEN_FCN (icode) (target, op0);
5308 if (! pat)
5309 return 0;
5310 emit_insn (pat);
5311 return target;
5314 /* Expand the stvx builtins. */
5315 static rtx
5316 altivec_expand_st_builtin (exp, target, expandedp)
5317 tree exp;
5318 rtx target ATTRIBUTE_UNUSED;
5319 bool *expandedp;
5321 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5322 tree arglist = TREE_OPERAND (exp, 1);
5323 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5324 tree arg0, arg1;
5325 enum machine_mode mode0, mode1;
5326 rtx pat, op0, op1;
5327 enum insn_code icode;
5329 switch (fcode)
5331 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5332 icode = CODE_FOR_altivec_stvx_16qi;
5333 break;
5334 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5335 icode = CODE_FOR_altivec_stvx_8hi;
5336 break;
5337 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5338 icode = CODE_FOR_altivec_stvx_4si;
5339 break;
5340 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5341 icode = CODE_FOR_altivec_stvx_4sf;
5342 break;
5343 default:
5344 *expandedp = false;
5345 return NULL_RTX;
5348 arg0 = TREE_VALUE (arglist);
5349 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5350 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5351 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5352 mode0 = insn_data[icode].operand[0].mode;
5353 mode1 = insn_data[icode].operand[1].mode;
5355 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5356 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5357 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5358 op1 = copy_to_mode_reg (mode1, op1);
5360 pat = GEN_FCN (icode) (op0, op1);
5361 if (pat)
5362 emit_insn (pat);
5364 *expandedp = true;
5365 return NULL_RTX;
5368 /* Expand the dst builtins. */
5369 static rtx
5370 altivec_expand_dst_builtin (exp, target, expandedp)
5371 tree exp;
5372 rtx target ATTRIBUTE_UNUSED;
5373 bool *expandedp;
5375 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5376 tree arglist = TREE_OPERAND (exp, 1);
5377 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5378 tree arg0, arg1, arg2;
5379 enum machine_mode mode0, mode1, mode2;
5380 rtx pat, op0, op1, op2;
5381 struct builtin_description *d;
5382 size_t i;
5384 *expandedp = false;
5386 /* Handle DST variants. */
5387 d = (struct builtin_description *) bdesc_dst;
5388 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5389 if (d->code == fcode)
5391 arg0 = TREE_VALUE (arglist);
5392 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5393 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5394 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5395 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5396 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5397 mode0 = insn_data[d->icode].operand[0].mode;
5398 mode1 = insn_data[d->icode].operand[1].mode;
5399 mode2 = insn_data[d->icode].operand[2].mode;
5401 /* Invalid arguments, bail out before generating bad rtl. */
5402 if (arg0 == error_mark_node
5403 || arg1 == error_mark_node
5404 || arg2 == error_mark_node)
5405 return const0_rtx;
5407 if (TREE_CODE (arg2) != INTEGER_CST
5408 || TREE_INT_CST_LOW (arg2) & ~0x3)
5410 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5411 return const0_rtx;
5414 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5415 op0 = copy_to_mode_reg (mode0, op0);
5416 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5417 op1 = copy_to_mode_reg (mode1, op1);
5419 pat = GEN_FCN (d->icode) (op0, op1, op2);
5420 if (pat != 0)
5421 emit_insn (pat);
5423 *expandedp = true;
5424 return NULL_RTX;
5427 return NULL_RTX;
5430 /* Expand the builtin in EXP and store the result in TARGET. Store
5431 true in *EXPANDEDP if we found a builtin to expand. */
5432 static rtx
5433 altivec_expand_builtin (exp, target, expandedp)
5434 tree exp;
5435 rtx target;
5436 bool *expandedp;
5438 struct builtin_description *d;
5439 struct builtin_description_predicates *dp;
5440 size_t i;
5441 enum insn_code icode;
5442 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5443 tree arglist = TREE_OPERAND (exp, 1);
5444 tree arg0;
5445 rtx op0, pat;
5446 enum machine_mode tmode, mode0;
5447 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5449 target = altivec_expand_ld_builtin (exp, target, expandedp);
5450 if (*expandedp)
5451 return target;
5453 target = altivec_expand_st_builtin (exp, target, expandedp);
5454 if (*expandedp)
5455 return target;
5457 target = altivec_expand_dst_builtin (exp, target, expandedp);
5458 if (*expandedp)
5459 return target;
5461 *expandedp = true;
5463 switch (fcode)
5465 case ALTIVEC_BUILTIN_STVX:
5466 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5467 case ALTIVEC_BUILTIN_STVEBX:
5468 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5469 case ALTIVEC_BUILTIN_STVEHX:
5470 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5471 case ALTIVEC_BUILTIN_STVEWX:
5472 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5473 case ALTIVEC_BUILTIN_STVXL:
5474 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5476 case ALTIVEC_BUILTIN_MFVSCR:
5477 icode = CODE_FOR_altivec_mfvscr;
5478 tmode = insn_data[icode].operand[0].mode;
5480 if (target == 0
5481 || GET_MODE (target) != tmode
5482 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5483 target = gen_reg_rtx (tmode);
5485 pat = GEN_FCN (icode) (target);
5486 if (! pat)
5487 return 0;
5488 emit_insn (pat);
5489 return target;
5491 case ALTIVEC_BUILTIN_MTVSCR:
5492 icode = CODE_FOR_altivec_mtvscr;
5493 arg0 = TREE_VALUE (arglist);
5494 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5495 mode0 = insn_data[icode].operand[0].mode;
5497 /* If we got invalid arguments bail out before generating bad rtl. */
5498 if (arg0 == error_mark_node)
5499 return const0_rtx;
5501 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5502 op0 = copy_to_mode_reg (mode0, op0);
5504 pat = GEN_FCN (icode) (op0);
5505 if (pat)
5506 emit_insn (pat);
5507 return NULL_RTX;
5509 case ALTIVEC_BUILTIN_DSSALL:
5510 emit_insn (gen_altivec_dssall ());
5511 return NULL_RTX;
5513 case ALTIVEC_BUILTIN_DSS:
5514 icode = CODE_FOR_altivec_dss;
5515 arg0 = TREE_VALUE (arglist);
5516 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5517 mode0 = insn_data[icode].operand[0].mode;
5519 /* If we got invalid arguments bail out before generating bad rtl. */
5520 if (arg0 == error_mark_node)
5521 return const0_rtx;
5523 if (TREE_CODE (arg0) != INTEGER_CST
5524 || TREE_INT_CST_LOW (arg0) & ~0x3)
5526 error ("argument to dss must be a 2-bit unsigned literal");
5527 return const0_rtx;
5530 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5531 op0 = copy_to_mode_reg (mode0, op0);
5533 emit_insn (gen_altivec_dss (op0));
5534 return NULL_RTX;
5537 /* Expand abs* operations. */
5538 d = (struct builtin_description *) bdesc_abs;
5539 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5540 if (d->code == fcode)
5541 return altivec_expand_abs_builtin (d->icode, arglist, target);
5543 /* Expand the AltiVec predicates. */
5544 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5545 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5546 if (dp->code == fcode)
5547 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5549 /* LV* are funky. We initialized them differently. */
5550 switch (fcode)
5552 case ALTIVEC_BUILTIN_LVSL:
5553 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5554 arglist, target);
5555 case ALTIVEC_BUILTIN_LVSR:
5556 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5557 arglist, target);
5558 case ALTIVEC_BUILTIN_LVEBX:
5559 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5560 arglist, target);
5561 case ALTIVEC_BUILTIN_LVEHX:
5562 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5563 arglist, target);
5564 case ALTIVEC_BUILTIN_LVEWX:
5565 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5566 arglist, target);
5567 case ALTIVEC_BUILTIN_LVXL:
5568 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5569 arglist, target);
5570 case ALTIVEC_BUILTIN_LVX:
5571 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5572 arglist, target);
5573 default:
5574 break;
5575 /* Fall through. */
5578 *expandedp = false;
5579 return NULL_RTX;
5582 /* Binops that need to be initialized manually, but can be expanded
5583 automagically by rs6000_expand_binop_builtin. */
5584 static struct builtin_description bdesc_2arg_spe[] =
5586 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5587 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5588 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5589 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5590 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5591 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5592 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5593 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5594 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5595 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5596 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5597 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5598 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5599 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5600 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5601 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5602 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5603 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5604 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5605 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5606 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5607 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5610 /* Expand the builtin in EXP and store the result in TARGET. Store
5611 true in *EXPANDEDP if we found a builtin to expand.
5613 This expands the SPE builtins that are not simple unary and binary
5614 operations. */
5615 static rtx
5616 spe_expand_builtin (exp, target, expandedp)
5617 tree exp;
5618 rtx target;
5619 bool *expandedp;
5621 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5622 tree arglist = TREE_OPERAND (exp, 1);
5623 tree arg1, arg0;
5624 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5625 enum insn_code icode;
5626 enum machine_mode tmode, mode0;
5627 rtx pat, op0;
5628 struct builtin_description *d;
5629 size_t i;
5631 *expandedp = true;
5633 /* Syntax check for a 5-bit unsigned immediate. */
5634 switch (fcode)
5636 case SPE_BUILTIN_EVSTDD:
5637 case SPE_BUILTIN_EVSTDH:
5638 case SPE_BUILTIN_EVSTDW:
5639 case SPE_BUILTIN_EVSTWHE:
5640 case SPE_BUILTIN_EVSTWHO:
5641 case SPE_BUILTIN_EVSTWWE:
5642 case SPE_BUILTIN_EVSTWWO:
5643 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5644 if (TREE_CODE (arg1) != INTEGER_CST
5645 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5647 error ("argument 2 must be a 5-bit unsigned literal");
5648 return const0_rtx;
5650 break;
5651 default:
5652 break;
5655 d = (struct builtin_description *) bdesc_2arg_spe;
5656 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5657 if (d->code == fcode)
5658 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5660 d = (struct builtin_description *) bdesc_spe_predicates;
5661 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5662 if (d->code == fcode)
5663 return spe_expand_predicate_builtin (d->icode, arglist, target);
5665 d = (struct builtin_description *) bdesc_spe_evsel;
5666 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5667 if (d->code == fcode)
5668 return spe_expand_evsel_builtin (d->icode, arglist, target);
5670 switch (fcode)
5672 case SPE_BUILTIN_EVSTDDX:
5673 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5674 case SPE_BUILTIN_EVSTDHX:
5675 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5676 case SPE_BUILTIN_EVSTDWX:
5677 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5678 case SPE_BUILTIN_EVSTWHEX:
5679 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5680 case SPE_BUILTIN_EVSTWHOX:
5681 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5682 case SPE_BUILTIN_EVSTWWEX:
5683 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5684 case SPE_BUILTIN_EVSTWWOX:
5685 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5686 case SPE_BUILTIN_EVSTDD:
5687 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5688 case SPE_BUILTIN_EVSTDH:
5689 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5690 case SPE_BUILTIN_EVSTDW:
5691 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5692 case SPE_BUILTIN_EVSTWHE:
5693 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5694 case SPE_BUILTIN_EVSTWHO:
5695 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5696 case SPE_BUILTIN_EVSTWWE:
5697 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5698 case SPE_BUILTIN_EVSTWWO:
5699 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5700 case SPE_BUILTIN_MFSPEFSCR:
5701 icode = CODE_FOR_spe_mfspefscr;
5702 tmode = insn_data[icode].operand[0].mode;
5704 if (target == 0
5705 || GET_MODE (target) != tmode
5706 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5707 target = gen_reg_rtx (tmode);
5709 pat = GEN_FCN (icode) (target);
5710 if (! pat)
5711 return 0;
5712 emit_insn (pat);
5713 return target;
5714 case SPE_BUILTIN_MTSPEFSCR:
5715 icode = CODE_FOR_spe_mtspefscr;
5716 arg0 = TREE_VALUE (arglist);
5717 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5718 mode0 = insn_data[icode].operand[0].mode;
5720 if (arg0 == error_mark_node)
5721 return const0_rtx;
5723 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5724 op0 = copy_to_mode_reg (mode0, op0);
5726 pat = GEN_FCN (icode) (op0);
5727 if (pat)
5728 emit_insn (pat);
5729 return NULL_RTX;
5730 default:
5731 break;
5734 *expandedp = false;
5735 return NULL_RTX;
5738 static rtx
5739 spe_expand_predicate_builtin (icode, arglist, target)
5740 enum insn_code icode;
5741 tree arglist;
5742 rtx target;
5744 rtx pat, scratch, tmp;
5745 tree form = TREE_VALUE (arglist);
5746 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5747 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5748 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5749 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5750 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5751 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5752 int form_int;
5753 enum rtx_code code;
5755 if (TREE_CODE (form) != INTEGER_CST)
5757 error ("argument 1 of __builtin_spe_predicate must be a constant");
5758 return const0_rtx;
5760 else
5761 form_int = TREE_INT_CST_LOW (form);
5763 if (mode0 != mode1)
5764 abort ();
5766 if (arg0 == error_mark_node || arg1 == error_mark_node)
5767 return const0_rtx;
5769 if (target == 0
5770 || GET_MODE (target) != SImode
5771 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5772 target = gen_reg_rtx (SImode);
5774 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5775 op0 = copy_to_mode_reg (mode0, op0);
5776 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5777 op1 = copy_to_mode_reg (mode1, op1);
5779 scratch = gen_reg_rtx (CCmode);
5781 pat = GEN_FCN (icode) (scratch, op0, op1);
5782 if (! pat)
5783 return const0_rtx;
5784 emit_insn (pat);
5786 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5787 _lower_. We use one compare, but look in different bits of the
5788 CR for each variant.
5790 There are 2 elements in each SPE simd type (upper/lower). The CR
5791 bits are set as follows:
5793 BIT0 | BIT 1 | BIT 2 | BIT 3
5794 U | L | (U | L) | (U & L)
5796 So, for an "all" relationship, BIT 3 would be set.
5797 For an "any" relationship, BIT 2 would be set. Etc.
5799 Following traditional nomenclature, these bits map to:
5801 BIT0 | BIT 1 | BIT 2 | BIT 3
5802 LT | GT | EQ | OV
5804 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5807 switch (form_int)
5809 /* All variant. OV bit. */
5810 case 0:
5811 /* We need to get to the OV bit, which is the ORDERED bit. We
5812 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5813 that's ugly and will trigger a validate_condition_mode abort.
5814 So let's just use another pattern. */
5815 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5816 return target;
5817 /* Any variant. EQ bit. */
5818 case 1:
5819 code = EQ;
5820 break;
5821 /* Upper variant. LT bit. */
5822 case 2:
5823 code = LT;
5824 break;
5825 /* Lower variant. GT bit. */
5826 case 3:
5827 code = GT;
5828 break;
5829 default:
5830 error ("argument 1 of __builtin_spe_predicate is out of range");
5831 return const0_rtx;
5834 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5835 emit_move_insn (target, tmp);
5837 return target;
5840 /* The evsel builtins look like this:
5842 e = __builtin_spe_evsel_OP (a, b, c, d);
5844 and work like this:
5846 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5847 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5850 static rtx
5851 spe_expand_evsel_builtin (icode, arglist, target)
5852 enum insn_code icode;
5853 tree arglist;
5854 rtx target;
5856 rtx pat, scratch;
5857 tree arg0 = TREE_VALUE (arglist);
5858 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5859 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5860 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5861 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5862 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5863 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5864 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5865 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5866 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5868 if (mode0 != mode1)
5869 abort ();
5871 if (arg0 == error_mark_node || arg1 == error_mark_node
5872 || arg2 == error_mark_node || arg3 == error_mark_node)
5873 return const0_rtx;
5875 if (target == 0
5876 || GET_MODE (target) != mode0
5877 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5878 target = gen_reg_rtx (mode0);
5880 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5881 op0 = copy_to_mode_reg (mode0, op0);
5882 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5883 op1 = copy_to_mode_reg (mode0, op1);
5884 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5885 op2 = copy_to_mode_reg (mode0, op2);
5886 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5887 op3 = copy_to_mode_reg (mode0, op3);
5889 /* Generate the compare. */
5890 scratch = gen_reg_rtx (CCmode);
5891 pat = GEN_FCN (icode) (scratch, op0, op1);
5892 if (! pat)
5893 return const0_rtx;
5894 emit_insn (pat);
5896 if (mode0 == V2SImode)
5897 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5898 else
5899 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5901 return target;
5904 /* Expand an expression EXP that calls a built-in function,
5905 with result going to TARGET if that's convenient
5906 (and in mode MODE if that's convenient).
5907 SUBTARGET may be used as the target for computing one of EXP's operands.
5908 IGNORE is nonzero if the value is to be ignored. */
5910 static rtx
5911 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5912 tree exp;
5913 rtx target;
5914 rtx subtarget ATTRIBUTE_UNUSED;
5915 enum machine_mode mode ATTRIBUTE_UNUSED;
5916 int ignore ATTRIBUTE_UNUSED;
5918 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5919 tree arglist = TREE_OPERAND (exp, 1);
5920 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5921 struct builtin_description *d;
5922 size_t i;
5923 rtx ret;
5924 bool success;
5926 if (TARGET_ALTIVEC)
5928 ret = altivec_expand_builtin (exp, target, &success);
5930 if (success)
5931 return ret;
5933 if (TARGET_SPE)
5935 ret = spe_expand_builtin (exp, target, &success);
5937 if (success)
5938 return ret;
5941 if (TARGET_ALTIVEC || TARGET_SPE)
5943 /* Handle simple unary operations. */
5944 d = (struct builtin_description *) bdesc_1arg;
5945 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5946 if (d->code == fcode)
5947 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5949 /* Handle simple binary operations. */
5950 d = (struct builtin_description *) bdesc_2arg;
5951 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5952 if (d->code == fcode)
5953 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5955 /* Handle simple ternary operations. */
5956 d = (struct builtin_description *) bdesc_3arg;
5957 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5958 if (d->code == fcode)
5959 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5962 abort ();
5963 return NULL_RTX;
5966 static void
5967 rs6000_init_builtins ()
5969 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5970 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5971 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
5973 if (TARGET_SPE)
5974 spe_init_builtins ();
5975 if (TARGET_ALTIVEC)
5976 altivec_init_builtins ();
5977 if (TARGET_ALTIVEC || TARGET_SPE)
5978 rs6000_common_init_builtins ();
5981 /* Search through a set of builtins and enable the mask bits.
5982 DESC is an array of builtins.
5983 SIZE is the total number of builtins.
5984 START is the builtin enum at which to start.
5985 END is the builtin enum at which to end. */
5986 static void
5987 enable_mask_for_builtins (desc, size, start, end)
5988 struct builtin_description *desc;
5989 int size;
5990 enum rs6000_builtins start, end;
5992 int i;
5994 for (i = 0; i < size; ++i)
5995 if (desc[i].code == start)
5996 break;
5998 if (i == size)
5999 return;
6001 for (; i < size; ++i)
6003 /* Flip all the bits on. */
6004 desc[i].mask = target_flags;
6005 if (desc[i].code == end)
6006 break;
6010 static void
6011 spe_init_builtins ()
6013 tree endlink = void_list_node;
6014 tree puint_type_node = build_pointer_type (unsigned_type_node);
6015 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6016 struct builtin_description *d;
6017 size_t i;
6019 tree v2si_ftype_4_v2si
6020 = build_function_type
6021 (opaque_V2SI_type_node,
6022 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6023 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6024 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6025 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6026 endlink)))));
6028 tree v2sf_ftype_4_v2sf
6029 = build_function_type
6030 (opaque_V2SF_type_node,
6031 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6032 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6033 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6034 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6035 endlink)))));
6037 tree int_ftype_int_v2si_v2si
6038 = build_function_type
6039 (integer_type_node,
6040 tree_cons (NULL_TREE, integer_type_node,
6041 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6042 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6043 endlink))));
6045 tree int_ftype_int_v2sf_v2sf
6046 = build_function_type
6047 (integer_type_node,
6048 tree_cons (NULL_TREE, integer_type_node,
6049 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6050 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6051 endlink))));
6053 tree void_ftype_v2si_puint_int
6054 = build_function_type (void_type_node,
6055 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6056 tree_cons (NULL_TREE, puint_type_node,
6057 tree_cons (NULL_TREE,
6058 integer_type_node,
6059 endlink))));
6061 tree void_ftype_v2si_puint_char
6062 = build_function_type (void_type_node,
6063 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6064 tree_cons (NULL_TREE, puint_type_node,
6065 tree_cons (NULL_TREE,
6066 char_type_node,
6067 endlink))));
6069 tree void_ftype_v2si_pv2si_int
6070 = build_function_type (void_type_node,
6071 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6072 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6073 tree_cons (NULL_TREE,
6074 integer_type_node,
6075 endlink))));
6077 tree void_ftype_v2si_pv2si_char
6078 = build_function_type (void_type_node,
6079 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6080 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6081 tree_cons (NULL_TREE,
6082 char_type_node,
6083 endlink))));
6085 tree void_ftype_int
6086 = build_function_type (void_type_node,
6087 tree_cons (NULL_TREE, integer_type_node, endlink));
6089 tree int_ftype_void
6090 = build_function_type (integer_type_node, endlink);
6092 tree v2si_ftype_pv2si_int
6093 = build_function_type (opaque_V2SI_type_node,
6094 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6095 tree_cons (NULL_TREE, integer_type_node,
6096 endlink)));
6098 tree v2si_ftype_puint_int
6099 = build_function_type (opaque_V2SI_type_node,
6100 tree_cons (NULL_TREE, puint_type_node,
6101 tree_cons (NULL_TREE, integer_type_node,
6102 endlink)));
6104 tree v2si_ftype_pushort_int
6105 = build_function_type (opaque_V2SI_type_node,
6106 tree_cons (NULL_TREE, pushort_type_node,
6107 tree_cons (NULL_TREE, integer_type_node,
6108 endlink)));
6110 /* The initialization of the simple binary and unary builtins is
6111 done in rs6000_common_init_builtins, but we have to enable the
6112 mask bits here manually because we have run out of `target_flags'
6113 bits. We really need to redesign this mask business. */
6115 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6116 ARRAY_SIZE (bdesc_2arg),
6117 SPE_BUILTIN_EVADDW,
6118 SPE_BUILTIN_EVXOR);
6119 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6120 ARRAY_SIZE (bdesc_1arg),
6121 SPE_BUILTIN_EVABS,
6122 SPE_BUILTIN_EVSUBFUSIAAW);
6123 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6124 ARRAY_SIZE (bdesc_spe_predicates),
6125 SPE_BUILTIN_EVCMPEQ,
6126 SPE_BUILTIN_EVFSTSTLT);
6127 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6128 ARRAY_SIZE (bdesc_spe_evsel),
6129 SPE_BUILTIN_EVSEL_CMPGTS,
6130 SPE_BUILTIN_EVSEL_FSTSTEQ);
6132 /* Initialize irregular SPE builtins. */
6134 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6135 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6136 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6137 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6138 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6139 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6140 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6141 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6142 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6143 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6144 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6145 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6146 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6147 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6148 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6149 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6151 /* Loads. */
6152 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6153 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6154 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6155 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6156 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6157 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6158 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6159 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6160 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6161 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6162 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6163 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6164 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6165 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6166 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6167 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6168 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6169 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6170 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6171 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6172 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6173 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6175 /* Predicates. */
6176 d = (struct builtin_description *) bdesc_spe_predicates;
6177 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6179 tree type;
6181 switch (insn_data[d->icode].operand[1].mode)
6183 case V2SImode:
6184 type = int_ftype_int_v2si_v2si;
6185 break;
6186 case V2SFmode:
6187 type = int_ftype_int_v2sf_v2sf;
6188 break;
6189 default:
6190 abort ();
6193 def_builtin (d->mask, d->name, type, d->code);
6196 /* Evsel predicates. */
6197 d = (struct builtin_description *) bdesc_spe_evsel;
6198 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6200 tree type;
6202 switch (insn_data[d->icode].operand[1].mode)
6204 case V2SImode:
6205 type = v2si_ftype_4_v2si;
6206 break;
6207 case V2SFmode:
6208 type = v2sf_ftype_4_v2sf;
6209 break;
6210 default:
6211 abort ();
6214 def_builtin (d->mask, d->name, type, d->code);
6218 static void
6219 altivec_init_builtins ()
6221 struct builtin_description *d;
6222 struct builtin_description_predicates *dp;
6223 size_t i;
6224 tree pfloat_type_node = build_pointer_type (float_type_node);
6225 tree pint_type_node = build_pointer_type (integer_type_node);
6226 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6227 tree pchar_type_node = build_pointer_type (char_type_node);
6229 tree pvoid_type_node = build_pointer_type (void_type_node);
6231 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6232 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6233 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6234 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6236 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6238 tree int_ftype_int_v4si_v4si
6239 = build_function_type_list (integer_type_node,
6240 integer_type_node, V4SI_type_node,
6241 V4SI_type_node, NULL_TREE);
6242 tree v4sf_ftype_pcfloat
6243 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6244 tree void_ftype_pfloat_v4sf
6245 = build_function_type_list (void_type_node,
6246 pfloat_type_node, V4SF_type_node, NULL_TREE);
6247 tree v4si_ftype_pcint
6248 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6249 tree void_ftype_pint_v4si
6250 = build_function_type_list (void_type_node,
6251 pint_type_node, V4SI_type_node, NULL_TREE);
6252 tree v8hi_ftype_pcshort
6253 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6254 tree void_ftype_pshort_v8hi
6255 = build_function_type_list (void_type_node,
6256 pshort_type_node, V8HI_type_node, NULL_TREE);
6257 tree v16qi_ftype_pcchar
6258 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6259 tree void_ftype_pchar_v16qi
6260 = build_function_type_list (void_type_node,
6261 pchar_type_node, V16QI_type_node, NULL_TREE);
6262 tree void_ftype_v4si
6263 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6264 tree v8hi_ftype_void
6265 = build_function_type (V8HI_type_node, void_list_node);
6266 tree void_ftype_void
6267 = build_function_type (void_type_node, void_list_node);
6268 tree void_ftype_qi
6269 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6271 tree v16qi_ftype_int_pcvoid
6272 = build_function_type_list (V16QI_type_node,
6273 integer_type_node, pcvoid_type_node, NULL_TREE);
6274 tree v8hi_ftype_int_pcvoid
6275 = build_function_type_list (V8HI_type_node,
6276 integer_type_node, pcvoid_type_node, NULL_TREE);
6277 tree v4si_ftype_int_pcvoid
6278 = build_function_type_list (V4SI_type_node,
6279 integer_type_node, pcvoid_type_node, NULL_TREE);
6281 tree void_ftype_v4si_int_pvoid
6282 = build_function_type_list (void_type_node,
6283 V4SI_type_node, integer_type_node,
6284 pvoid_type_node, NULL_TREE);
6285 tree void_ftype_v16qi_int_pvoid
6286 = build_function_type_list (void_type_node,
6287 V16QI_type_node, integer_type_node,
6288 pvoid_type_node, NULL_TREE);
6289 tree void_ftype_v8hi_int_pvoid
6290 = build_function_type_list (void_type_node,
6291 V8HI_type_node, integer_type_node,
6292 pvoid_type_node, NULL_TREE);
6293 tree int_ftype_int_v8hi_v8hi
6294 = build_function_type_list (integer_type_node,
6295 integer_type_node, V8HI_type_node,
6296 V8HI_type_node, NULL_TREE);
6297 tree int_ftype_int_v16qi_v16qi
6298 = build_function_type_list (integer_type_node,
6299 integer_type_node, V16QI_type_node,
6300 V16QI_type_node, NULL_TREE);
6301 tree int_ftype_int_v4sf_v4sf
6302 = build_function_type_list (integer_type_node,
6303 integer_type_node, V4SF_type_node,
6304 V4SF_type_node, NULL_TREE);
6305 tree v4si_ftype_v4si
6306 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6307 tree v8hi_ftype_v8hi
6308 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6309 tree v16qi_ftype_v16qi
6310 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6311 tree v4sf_ftype_v4sf
6312 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6313 tree void_ftype_pcvoid_int_char
6314 = build_function_type_list (void_type_node,
6315 pcvoid_type_node, integer_type_node,
6316 char_type_node, NULL_TREE);
6318 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6319 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6320 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6321 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6322 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6323 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6324 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6325 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6326 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6327 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6328 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6329 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6330 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6331 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6332 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6333 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6334 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6335 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6336 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6337 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6338 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6339 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6340 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6341 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6342 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6343 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6344 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6345 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6346 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6347 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6348 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6349 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6351 /* Add the DST variants. */
6352 d = (struct builtin_description *) bdesc_dst;
6353 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6354 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6356 /* Initialize the predicates. */
6357 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6358 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6360 enum machine_mode mode1;
6361 tree type;
6363 mode1 = insn_data[dp->icode].operand[1].mode;
6365 switch (mode1)
6367 case V4SImode:
6368 type = int_ftype_int_v4si_v4si;
6369 break;
6370 case V8HImode:
6371 type = int_ftype_int_v8hi_v8hi;
6372 break;
6373 case V16QImode:
6374 type = int_ftype_int_v16qi_v16qi;
6375 break;
6376 case V4SFmode:
6377 type = int_ftype_int_v4sf_v4sf;
6378 break;
6379 default:
6380 abort ();
6383 def_builtin (dp->mask, dp->name, type, dp->code);
6386 /* Initialize the abs* operators. */
6387 d = (struct builtin_description *) bdesc_abs;
6388 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6390 enum machine_mode mode0;
6391 tree type;
6393 mode0 = insn_data[d->icode].operand[0].mode;
6395 switch (mode0)
6397 case V4SImode:
6398 type = v4si_ftype_v4si;
6399 break;
6400 case V8HImode:
6401 type = v8hi_ftype_v8hi;
6402 break;
6403 case V16QImode:
6404 type = v16qi_ftype_v16qi;
6405 break;
6406 case V4SFmode:
6407 type = v4sf_ftype_v4sf;
6408 break;
6409 default:
6410 abort ();
6413 def_builtin (d->mask, d->name, type, d->code);
6417 static void
6418 rs6000_common_init_builtins ()
6420 struct builtin_description *d;
6421 size_t i;
6423 tree v4sf_ftype_v4sf_v4sf_v16qi
6424 = build_function_type_list (V4SF_type_node,
6425 V4SF_type_node, V4SF_type_node,
6426 V16QI_type_node, NULL_TREE);
6427 tree v4si_ftype_v4si_v4si_v16qi
6428 = build_function_type_list (V4SI_type_node,
6429 V4SI_type_node, V4SI_type_node,
6430 V16QI_type_node, NULL_TREE);
6431 tree v8hi_ftype_v8hi_v8hi_v16qi
6432 = build_function_type_list (V8HI_type_node,
6433 V8HI_type_node, V8HI_type_node,
6434 V16QI_type_node, NULL_TREE);
6435 tree v16qi_ftype_v16qi_v16qi_v16qi
6436 = build_function_type_list (V16QI_type_node,
6437 V16QI_type_node, V16QI_type_node,
6438 V16QI_type_node, NULL_TREE);
6439 tree v4si_ftype_char
6440 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6441 tree v8hi_ftype_char
6442 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6443 tree v16qi_ftype_char
6444 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6445 tree v8hi_ftype_v16qi
6446 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6447 tree v4sf_ftype_v4sf
6448 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6450 tree v2si_ftype_v2si_v2si
6451 = build_function_type_list (opaque_V2SI_type_node,
6452 opaque_V2SI_type_node,
6453 opaque_V2SI_type_node, NULL_TREE);
6455 tree v2sf_ftype_v2sf_v2sf
6456 = build_function_type_list (opaque_V2SF_type_node,
6457 opaque_V2SF_type_node,
6458 opaque_V2SF_type_node, NULL_TREE);
6460 tree v2si_ftype_int_int
6461 = build_function_type_list (opaque_V2SI_type_node,
6462 integer_type_node, integer_type_node,
6463 NULL_TREE);
6465 tree v2si_ftype_v2si
6466 = build_function_type_list (opaque_V2SI_type_node,
6467 opaque_V2SI_type_node, NULL_TREE);
6469 tree v2sf_ftype_v2sf
6470 = build_function_type_list (opaque_V2SF_type_node,
6471 opaque_V2SF_type_node, NULL_TREE);
6473 tree v2sf_ftype_v2si
6474 = build_function_type_list (opaque_V2SF_type_node,
6475 opaque_V2SI_type_node, NULL_TREE);
6477 tree v2si_ftype_v2sf
6478 = build_function_type_list (opaque_V2SI_type_node,
6479 opaque_V2SF_type_node, NULL_TREE);
6481 tree v2si_ftype_v2si_char
6482 = build_function_type_list (opaque_V2SI_type_node,
6483 opaque_V2SI_type_node,
6484 char_type_node, NULL_TREE);
6486 tree v2si_ftype_int_char
6487 = build_function_type_list (opaque_V2SI_type_node,
6488 integer_type_node, char_type_node, NULL_TREE);
6490 tree v2si_ftype_char
6491 = build_function_type_list (opaque_V2SI_type_node,
6492 char_type_node, NULL_TREE);
6494 tree int_ftype_int_int
6495 = build_function_type_list (integer_type_node,
6496 integer_type_node, integer_type_node,
6497 NULL_TREE);
6499 tree v4si_ftype_v4si_v4si
6500 = build_function_type_list (V4SI_type_node,
6501 V4SI_type_node, V4SI_type_node, NULL_TREE);
6502 tree v4sf_ftype_v4si_char
6503 = build_function_type_list (V4SF_type_node,
6504 V4SI_type_node, char_type_node, NULL_TREE);
6505 tree v4si_ftype_v4sf_char
6506 = build_function_type_list (V4SI_type_node,
6507 V4SF_type_node, char_type_node, NULL_TREE);
6508 tree v4si_ftype_v4si_char
6509 = build_function_type_list (V4SI_type_node,
6510 V4SI_type_node, char_type_node, NULL_TREE);
6511 tree v8hi_ftype_v8hi_char
6512 = build_function_type_list (V8HI_type_node,
6513 V8HI_type_node, char_type_node, NULL_TREE);
6514 tree v16qi_ftype_v16qi_char
6515 = build_function_type_list (V16QI_type_node,
6516 V16QI_type_node, char_type_node, NULL_TREE);
6517 tree v16qi_ftype_v16qi_v16qi_char
6518 = build_function_type_list (V16QI_type_node,
6519 V16QI_type_node, V16QI_type_node,
6520 char_type_node, NULL_TREE);
6521 tree v8hi_ftype_v8hi_v8hi_char
6522 = build_function_type_list (V8HI_type_node,
6523 V8HI_type_node, V8HI_type_node,
6524 char_type_node, NULL_TREE);
6525 tree v4si_ftype_v4si_v4si_char
6526 = build_function_type_list (V4SI_type_node,
6527 V4SI_type_node, V4SI_type_node,
6528 char_type_node, NULL_TREE);
6529 tree v4sf_ftype_v4sf_v4sf_char
6530 = build_function_type_list (V4SF_type_node,
6531 V4SF_type_node, V4SF_type_node,
6532 char_type_node, NULL_TREE);
6533 tree v4sf_ftype_v4sf_v4sf
6534 = build_function_type_list (V4SF_type_node,
6535 V4SF_type_node, V4SF_type_node, NULL_TREE);
6536 tree v4sf_ftype_v4sf_v4sf_v4si
6537 = build_function_type_list (V4SF_type_node,
6538 V4SF_type_node, V4SF_type_node,
6539 V4SI_type_node, NULL_TREE);
6540 tree v4sf_ftype_v4sf_v4sf_v4sf
6541 = build_function_type_list (V4SF_type_node,
6542 V4SF_type_node, V4SF_type_node,
6543 V4SF_type_node, NULL_TREE);
6544 tree v4si_ftype_v4si_v4si_v4si
6545 = build_function_type_list (V4SI_type_node,
6546 V4SI_type_node, V4SI_type_node,
6547 V4SI_type_node, NULL_TREE);
6548 tree v8hi_ftype_v8hi_v8hi
6549 = build_function_type_list (V8HI_type_node,
6550 V8HI_type_node, V8HI_type_node, NULL_TREE);
6551 tree v8hi_ftype_v8hi_v8hi_v8hi
6552 = build_function_type_list (V8HI_type_node,
6553 V8HI_type_node, V8HI_type_node,
6554 V8HI_type_node, NULL_TREE);
6555 tree v4si_ftype_v8hi_v8hi_v4si
6556 = build_function_type_list (V4SI_type_node,
6557 V8HI_type_node, V8HI_type_node,
6558 V4SI_type_node, NULL_TREE);
6559 tree v4si_ftype_v16qi_v16qi_v4si
6560 = build_function_type_list (V4SI_type_node,
6561 V16QI_type_node, V16QI_type_node,
6562 V4SI_type_node, NULL_TREE);
6563 tree v16qi_ftype_v16qi_v16qi
6564 = build_function_type_list (V16QI_type_node,
6565 V16QI_type_node, V16QI_type_node, NULL_TREE);
6566 tree v4si_ftype_v4sf_v4sf
6567 = build_function_type_list (V4SI_type_node,
6568 V4SF_type_node, V4SF_type_node, NULL_TREE);
6569 tree v8hi_ftype_v16qi_v16qi
6570 = build_function_type_list (V8HI_type_node,
6571 V16QI_type_node, V16QI_type_node, NULL_TREE);
6572 tree v4si_ftype_v8hi_v8hi
6573 = build_function_type_list (V4SI_type_node,
6574 V8HI_type_node, V8HI_type_node, NULL_TREE);
6575 tree v8hi_ftype_v4si_v4si
6576 = build_function_type_list (V8HI_type_node,
6577 V4SI_type_node, V4SI_type_node, NULL_TREE);
6578 tree v16qi_ftype_v8hi_v8hi
6579 = build_function_type_list (V16QI_type_node,
6580 V8HI_type_node, V8HI_type_node, NULL_TREE);
6581 tree v4si_ftype_v16qi_v4si
6582 = build_function_type_list (V4SI_type_node,
6583 V16QI_type_node, V4SI_type_node, NULL_TREE);
6584 tree v4si_ftype_v16qi_v16qi
6585 = build_function_type_list (V4SI_type_node,
6586 V16QI_type_node, V16QI_type_node, NULL_TREE);
6587 tree v4si_ftype_v8hi_v4si
6588 = build_function_type_list (V4SI_type_node,
6589 V8HI_type_node, V4SI_type_node, NULL_TREE);
6590 tree v4si_ftype_v8hi
6591 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6592 tree int_ftype_v4si_v4si
6593 = build_function_type_list (integer_type_node,
6594 V4SI_type_node, V4SI_type_node, NULL_TREE);
6595 tree int_ftype_v4sf_v4sf
6596 = build_function_type_list (integer_type_node,
6597 V4SF_type_node, V4SF_type_node, NULL_TREE);
6598 tree int_ftype_v16qi_v16qi
6599 = build_function_type_list (integer_type_node,
6600 V16QI_type_node, V16QI_type_node, NULL_TREE);
6601 tree int_ftype_v8hi_v8hi
6602 = build_function_type_list (integer_type_node,
6603 V8HI_type_node, V8HI_type_node, NULL_TREE);
6605 /* Add the simple ternary operators. */
6606 d = (struct builtin_description *) bdesc_3arg;
6607 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6610 enum machine_mode mode0, mode1, mode2, mode3;
6611 tree type;
6613 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6614 continue;
6616 mode0 = insn_data[d->icode].operand[0].mode;
6617 mode1 = insn_data[d->icode].operand[1].mode;
6618 mode2 = insn_data[d->icode].operand[2].mode;
6619 mode3 = insn_data[d->icode].operand[3].mode;
6621 /* When all four are of the same mode. */
6622 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6624 switch (mode0)
6626 case V4SImode:
6627 type = v4si_ftype_v4si_v4si_v4si;
6628 break;
6629 case V4SFmode:
6630 type = v4sf_ftype_v4sf_v4sf_v4sf;
6631 break;
6632 case V8HImode:
6633 type = v8hi_ftype_v8hi_v8hi_v8hi;
6634 break;
6635 case V16QImode:
6636 type = v16qi_ftype_v16qi_v16qi_v16qi;
6637 break;
6638 default:
6639 abort();
6642 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6644 switch (mode0)
6646 case V4SImode:
6647 type = v4si_ftype_v4si_v4si_v16qi;
6648 break;
6649 case V4SFmode:
6650 type = v4sf_ftype_v4sf_v4sf_v16qi;
6651 break;
6652 case V8HImode:
6653 type = v8hi_ftype_v8hi_v8hi_v16qi;
6654 break;
6655 case V16QImode:
6656 type = v16qi_ftype_v16qi_v16qi_v16qi;
6657 break;
6658 default:
6659 abort();
6662 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6663 && mode3 == V4SImode)
6664 type = v4si_ftype_v16qi_v16qi_v4si;
6665 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6666 && mode3 == V4SImode)
6667 type = v4si_ftype_v8hi_v8hi_v4si;
6668 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6669 && mode3 == V4SImode)
6670 type = v4sf_ftype_v4sf_v4sf_v4si;
6672 /* vchar, vchar, vchar, 4 bit literal. */
6673 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6674 && mode3 == QImode)
6675 type = v16qi_ftype_v16qi_v16qi_char;
6677 /* vshort, vshort, vshort, 4 bit literal. */
6678 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6679 && mode3 == QImode)
6680 type = v8hi_ftype_v8hi_v8hi_char;
6682 /* vint, vint, vint, 4 bit literal. */
6683 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6684 && mode3 == QImode)
6685 type = v4si_ftype_v4si_v4si_char;
6687 /* vfloat, vfloat, vfloat, 4 bit literal. */
6688 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6689 && mode3 == QImode)
6690 type = v4sf_ftype_v4sf_v4sf_char;
6692 else
6693 abort ();
6695 def_builtin (d->mask, d->name, type, d->code);
6698 /* Add the simple binary operators. */
6699 d = (struct builtin_description *) bdesc_2arg;
6700 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6702 enum machine_mode mode0, mode1, mode2;
6703 tree type;
6705 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6706 continue;
6708 mode0 = insn_data[d->icode].operand[0].mode;
6709 mode1 = insn_data[d->icode].operand[1].mode;
6710 mode2 = insn_data[d->icode].operand[2].mode;
6712 /* When all three operands are of the same mode. */
6713 if (mode0 == mode1 && mode1 == mode2)
6715 switch (mode0)
6717 case V4SFmode:
6718 type = v4sf_ftype_v4sf_v4sf;
6719 break;
6720 case V4SImode:
6721 type = v4si_ftype_v4si_v4si;
6722 break;
6723 case V16QImode:
6724 type = v16qi_ftype_v16qi_v16qi;
6725 break;
6726 case V8HImode:
6727 type = v8hi_ftype_v8hi_v8hi;
6728 break;
6729 case V2SImode:
6730 type = v2si_ftype_v2si_v2si;
6731 break;
6732 case V2SFmode:
6733 type = v2sf_ftype_v2sf_v2sf;
6734 break;
6735 case SImode:
6736 type = int_ftype_int_int;
6737 break;
6738 default:
6739 abort ();
6743 /* A few other combos we really don't want to do manually. */
6745 /* vint, vfloat, vfloat. */
6746 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6747 type = v4si_ftype_v4sf_v4sf;
6749 /* vshort, vchar, vchar. */
6750 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6751 type = v8hi_ftype_v16qi_v16qi;
6753 /* vint, vshort, vshort. */
6754 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6755 type = v4si_ftype_v8hi_v8hi;
6757 /* vshort, vint, vint. */
6758 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6759 type = v8hi_ftype_v4si_v4si;
6761 /* vchar, vshort, vshort. */
6762 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6763 type = v16qi_ftype_v8hi_v8hi;
6765 /* vint, vchar, vint. */
6766 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6767 type = v4si_ftype_v16qi_v4si;
6769 /* vint, vchar, vchar. */
6770 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6771 type = v4si_ftype_v16qi_v16qi;
6773 /* vint, vshort, vint. */
6774 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6775 type = v4si_ftype_v8hi_v4si;
6777 /* vint, vint, 5 bit literal. */
6778 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6779 type = v4si_ftype_v4si_char;
6781 /* vshort, vshort, 5 bit literal. */
6782 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6783 type = v8hi_ftype_v8hi_char;
6785 /* vchar, vchar, 5 bit literal. */
6786 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6787 type = v16qi_ftype_v16qi_char;
6789 /* vfloat, vint, 5 bit literal. */
6790 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6791 type = v4sf_ftype_v4si_char;
6793 /* vint, vfloat, 5 bit literal. */
6794 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6795 type = v4si_ftype_v4sf_char;
6797 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6798 type = v2si_ftype_int_int;
6800 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6801 type = v2si_ftype_v2si_char;
6803 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6804 type = v2si_ftype_int_char;
6806 /* int, x, x. */
6807 else if (mode0 == SImode)
6809 switch (mode1)
6811 case V4SImode:
6812 type = int_ftype_v4si_v4si;
6813 break;
6814 case V4SFmode:
6815 type = int_ftype_v4sf_v4sf;
6816 break;
6817 case V16QImode:
6818 type = int_ftype_v16qi_v16qi;
6819 break;
6820 case V8HImode:
6821 type = int_ftype_v8hi_v8hi;
6822 break;
6823 default:
6824 abort ();
6828 else
6829 abort ();
6831 def_builtin (d->mask, d->name, type, d->code);
6834 /* Add the simple unary operators. */
6835 d = (struct builtin_description *) bdesc_1arg;
6836 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6838 enum machine_mode mode0, mode1;
6839 tree type;
6841 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6842 continue;
6844 mode0 = insn_data[d->icode].operand[0].mode;
6845 mode1 = insn_data[d->icode].operand[1].mode;
6847 if (mode0 == V4SImode && mode1 == QImode)
6848 type = v4si_ftype_char;
6849 else if (mode0 == V8HImode && mode1 == QImode)
6850 type = v8hi_ftype_char;
6851 else if (mode0 == V16QImode && mode1 == QImode)
6852 type = v16qi_ftype_char;
6853 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6854 type = v4sf_ftype_v4sf;
6855 else if (mode0 == V8HImode && mode1 == V16QImode)
6856 type = v8hi_ftype_v16qi;
6857 else if (mode0 == V4SImode && mode1 == V8HImode)
6858 type = v4si_ftype_v8hi;
6859 else if (mode0 == V2SImode && mode1 == V2SImode)
6860 type = v2si_ftype_v2si;
6861 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6862 type = v2sf_ftype_v2sf;
6863 else if (mode0 == V2SFmode && mode1 == V2SImode)
6864 type = v2sf_ftype_v2si;
6865 else if (mode0 == V2SImode && mode1 == V2SFmode)
6866 type = v2si_ftype_v2sf;
6867 else if (mode0 == V2SImode && mode1 == QImode)
6868 type = v2si_ftype_char;
6869 else
6870 abort ();
6872 def_builtin (d->mask, d->name, type, d->code);
6877 /* Expand a block move operation, and return 1 if successful. Return 0
6878 if we should let the compiler generate normal code.
6880 operands[0] is the destination
6881 operands[1] is the source
6882 operands[2] is the length
6883 operands[3] is the alignment */
6885 #define MAX_MOVE_REG 4
6888 expand_block_move (operands)
6889 rtx operands[];
6891 rtx orig_dest = operands[0];
6892 rtx orig_src = operands[1];
6893 rtx bytes_rtx = operands[2];
6894 rtx align_rtx = operands[3];
6895 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6896 int align;
6897 int bytes;
6898 int offset;
6899 int move_bytes;
6900 rtx stores[MAX_MOVE_REG];
6901 int num_reg = 0;
6903 /* If this is not a fixed size move, just call memcpy */
6904 if (! constp)
6905 return 0;
6907 /* If this is not a fixed size alignment, abort */
6908 if (GET_CODE (align_rtx) != CONST_INT)
6909 abort ();
6910 align = INTVAL (align_rtx);
6912 /* Anything to move? */
6913 bytes = INTVAL (bytes_rtx);
6914 if (bytes <= 0)
6915 return 1;
6917 /* store_one_arg depends on expand_block_move to handle at least the size of
6918 reg_parm_stack_space. */
6919 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6920 return 0;
6922 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6924 union {
6925 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6926 rtx (*mov) PARAMS ((rtx, rtx));
6927 } gen_func;
6928 enum machine_mode mode = BLKmode;
6929 rtx src, dest;
6931 if (TARGET_STRING
6932 && bytes > 24 /* move up to 32 bytes at a time */
6933 && ! fixed_regs[5]
6934 && ! fixed_regs[6]
6935 && ! fixed_regs[7]
6936 && ! fixed_regs[8]
6937 && ! fixed_regs[9]
6938 && ! fixed_regs[10]
6939 && ! fixed_regs[11]
6940 && ! fixed_regs[12])
6942 move_bytes = (bytes > 32) ? 32 : bytes;
6943 gen_func.movstrsi = gen_movstrsi_8reg;
6945 else if (TARGET_STRING
6946 && bytes > 16 /* move up to 24 bytes at a time */
6947 && ! fixed_regs[5]
6948 && ! fixed_regs[6]
6949 && ! fixed_regs[7]
6950 && ! fixed_regs[8]
6951 && ! fixed_regs[9]
6952 && ! fixed_regs[10])
6954 move_bytes = (bytes > 24) ? 24 : bytes;
6955 gen_func.movstrsi = gen_movstrsi_6reg;
6957 else if (TARGET_STRING
6958 && bytes > 8 /* move up to 16 bytes at a time */
6959 && ! fixed_regs[5]
6960 && ! fixed_regs[6]
6961 && ! fixed_regs[7]
6962 && ! fixed_regs[8])
6964 move_bytes = (bytes > 16) ? 16 : bytes;
6965 gen_func.movstrsi = gen_movstrsi_4reg;
6967 else if (bytes >= 8 && TARGET_POWERPC64
6968 /* 64-bit loads and stores require word-aligned
6969 displacements. */
6970 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6972 move_bytes = 8;
6973 mode = DImode;
6974 gen_func.mov = gen_movdi;
6976 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
6977 { /* move up to 8 bytes at a time */
6978 move_bytes = (bytes > 8) ? 8 : bytes;
6979 gen_func.movstrsi = gen_movstrsi_2reg;
6981 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6982 { /* move 4 bytes */
6983 move_bytes = 4;
6984 mode = SImode;
6985 gen_func.mov = gen_movsi;
6987 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6988 { /* move 2 bytes */
6989 move_bytes = 2;
6990 mode = HImode;
6991 gen_func.mov = gen_movhi;
6993 else if (TARGET_STRING && bytes > 1)
6994 { /* move up to 4 bytes at a time */
6995 move_bytes = (bytes > 4) ? 4 : bytes;
6996 gen_func.movstrsi = gen_movstrsi_1reg;
6998 else /* move 1 byte at a time */
7000 move_bytes = 1;
7001 mode = QImode;
7002 gen_func.mov = gen_movqi;
7005 src = adjust_address (orig_src, mode, offset);
7006 dest = adjust_address (orig_dest, mode, offset);
7008 if (mode != BLKmode)
7010 rtx tmp_reg = gen_reg_rtx (mode);
7012 emit_insn ((*gen_func.mov) (tmp_reg, src));
7013 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7016 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7018 int i;
7019 for (i = 0; i < num_reg; i++)
7020 emit_insn (stores[i]);
7021 num_reg = 0;
7024 if (mode == BLKmode)
7026 /* Move the address into scratch registers. The movstrsi
7027 patterns require zero offset. */
7028 if (!REG_P (XEXP (src, 0)))
7030 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7031 src = replace_equiv_address (src, src_reg);
7033 set_mem_size (src, GEN_INT (move_bytes));
7035 if (!REG_P (XEXP (dest, 0)))
7037 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7038 dest = replace_equiv_address (dest, dest_reg);
7040 set_mem_size (dest, GEN_INT (move_bytes));
7042 emit_insn ((*gen_func.movstrsi) (dest, src,
7043 GEN_INT (move_bytes & 31),
7044 align_rtx));
7048 return 1;
7052 /* Return 1 if OP is a load multiple operation. It is known to be a
7053 PARALLEL and the first section will be tested. */
7056 load_multiple_operation (op, mode)
7057 rtx op;
7058 enum machine_mode mode ATTRIBUTE_UNUSED;
7060 int count = XVECLEN (op, 0);
7061 unsigned int dest_regno;
7062 rtx src_addr;
7063 int i;
7065 /* Perform a quick check so we don't blow up below. */
7066 if (count <= 1
7067 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7068 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7069 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7070 return 0;
7072 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7073 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7075 for (i = 1; i < count; i++)
7077 rtx elt = XVECEXP (op, 0, i);
7079 if (GET_CODE (elt) != SET
7080 || GET_CODE (SET_DEST (elt)) != REG
7081 || GET_MODE (SET_DEST (elt)) != SImode
7082 || REGNO (SET_DEST (elt)) != dest_regno + i
7083 || GET_CODE (SET_SRC (elt)) != MEM
7084 || GET_MODE (SET_SRC (elt)) != SImode
7085 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7086 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7087 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7088 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7089 return 0;
7092 return 1;
7095 /* Similar, but tests for store multiple. Here, the second vector element
7096 is a CLOBBER. It will be tested later. */
7099 store_multiple_operation (op, mode)
7100 rtx op;
7101 enum machine_mode mode ATTRIBUTE_UNUSED;
7103 int count = XVECLEN (op, 0) - 1;
7104 unsigned int src_regno;
7105 rtx dest_addr;
7106 int i;
7108 /* Perform a quick check so we don't blow up below. */
7109 if (count <= 1
7110 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7111 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7112 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7113 return 0;
7115 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7116 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7118 for (i = 1; i < count; i++)
7120 rtx elt = XVECEXP (op, 0, i + 1);
7122 if (GET_CODE (elt) != SET
7123 || GET_CODE (SET_SRC (elt)) != REG
7124 || GET_MODE (SET_SRC (elt)) != SImode
7125 || REGNO (SET_SRC (elt)) != src_regno + i
7126 || GET_CODE (SET_DEST (elt)) != MEM
7127 || GET_MODE (SET_DEST (elt)) != SImode
7128 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7129 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7130 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7131 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7132 return 0;
7135 return 1;
7138 /* Return a string to perform a load_multiple operation.
7139 operands[0] is the vector.
7140 operands[1] is the source address.
7141 operands[2] is the first destination register. */
7143 const char *
7144 rs6000_output_load_multiple (operands)
7145 rtx operands[3];
7147 /* We have to handle the case where the pseudo used to contain the address
7148 is assigned to one of the output registers. */
7149 int i, j;
7150 int words = XVECLEN (operands[0], 0);
7151 rtx xop[10];
7153 if (XVECLEN (operands[0], 0) == 1)
7154 return "{l|lwz} %2,0(%1)";
7156 for (i = 0; i < words; i++)
7157 if (refers_to_regno_p (REGNO (operands[2]) + i,
7158 REGNO (operands[2]) + i + 1, operands[1], 0))
7160 if (i == words-1)
7162 xop[0] = GEN_INT (4 * (words-1));
7163 xop[1] = operands[1];
7164 xop[2] = operands[2];
7165 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7166 return "";
7168 else if (i == 0)
7170 xop[0] = GEN_INT (4 * (words-1));
7171 xop[1] = operands[1];
7172 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7173 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7174 return "";
7176 else
7178 for (j = 0; j < words; j++)
7179 if (j != i)
7181 xop[0] = GEN_INT (j * 4);
7182 xop[1] = operands[1];
7183 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7184 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7186 xop[0] = GEN_INT (i * 4);
7187 xop[1] = operands[1];
7188 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7189 return "";
7193 return "{lsi|lswi} %2,%1,%N0";
7196 /* Return 1 for a parallel vrsave operation. */
7199 vrsave_operation (op, mode)
7200 rtx op;
7201 enum machine_mode mode ATTRIBUTE_UNUSED;
7203 int count = XVECLEN (op, 0);
7204 unsigned int dest_regno, src_regno;
7205 int i;
7207 if (count <= 1
7208 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7209 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7210 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7211 return 0;
7213 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7214 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7216 if (dest_regno != VRSAVE_REGNO
7217 && src_regno != VRSAVE_REGNO)
7218 return 0;
7220 for (i = 1; i < count; i++)
7222 rtx elt = XVECEXP (op, 0, i);
7224 if (GET_CODE (elt) != CLOBBER
7225 && GET_CODE (elt) != SET)
7226 return 0;
7229 return 1;
7232 /* Return 1 for an PARALLEL suitable for mtcrf. */
7235 mtcrf_operation (op, mode)
7236 rtx op;
7237 enum machine_mode mode ATTRIBUTE_UNUSED;
7239 int count = XVECLEN (op, 0);
7240 int i;
7241 rtx src_reg;
7243 /* Perform a quick check so we don't blow up below. */
7244 if (count < 1
7245 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7246 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7247 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7248 return 0;
7249 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7251 if (GET_CODE (src_reg) != REG
7252 || GET_MODE (src_reg) != SImode
7253 || ! INT_REGNO_P (REGNO (src_reg)))
7254 return 0;
7256 for (i = 0; i < count; i++)
7258 rtx exp = XVECEXP (op, 0, i);
7259 rtx unspec;
7260 int maskval;
7262 if (GET_CODE (exp) != SET
7263 || GET_CODE (SET_DEST (exp)) != REG
7264 || GET_MODE (SET_DEST (exp)) != CCmode
7265 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7266 return 0;
7267 unspec = SET_SRC (exp);
7268 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7270 if (GET_CODE (unspec) != UNSPEC
7271 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7272 || XVECLEN (unspec, 0) != 2
7273 || XVECEXP (unspec, 0, 0) != src_reg
7274 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7275 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7276 return 0;
7278 return 1;
7281 /* Return 1 for an PARALLEL suitable for lmw. */
7284 lmw_operation (op, mode)
7285 rtx op;
7286 enum machine_mode mode ATTRIBUTE_UNUSED;
7288 int count = XVECLEN (op, 0);
7289 unsigned int dest_regno;
7290 rtx src_addr;
7291 unsigned int base_regno;
7292 HOST_WIDE_INT offset;
7293 int i;
7295 /* Perform a quick check so we don't blow up below. */
7296 if (count <= 1
7297 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7298 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7299 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7300 return 0;
7302 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7303 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7305 if (dest_regno > 31
7306 || count != 32 - (int) dest_regno)
7307 return 0;
7309 if (legitimate_indirect_address_p (src_addr, 0))
7311 offset = 0;
7312 base_regno = REGNO (src_addr);
7313 if (base_regno == 0)
7314 return 0;
7316 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7318 offset = INTVAL (XEXP (src_addr, 1));
7319 base_regno = REGNO (XEXP (src_addr, 0));
7321 else
7322 return 0;
7324 for (i = 0; i < count; i++)
7326 rtx elt = XVECEXP (op, 0, i);
7327 rtx newaddr;
7328 rtx addr_reg;
7329 HOST_WIDE_INT newoffset;
7331 if (GET_CODE (elt) != SET
7332 || GET_CODE (SET_DEST (elt)) != REG
7333 || GET_MODE (SET_DEST (elt)) != SImode
7334 || REGNO (SET_DEST (elt)) != dest_regno + i
7335 || GET_CODE (SET_SRC (elt)) != MEM
7336 || GET_MODE (SET_SRC (elt)) != SImode)
7337 return 0;
7338 newaddr = XEXP (SET_SRC (elt), 0);
7339 if (legitimate_indirect_address_p (newaddr, 0))
7341 newoffset = 0;
7342 addr_reg = newaddr;
7344 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7346 addr_reg = XEXP (newaddr, 0);
7347 newoffset = INTVAL (XEXP (newaddr, 1));
7349 else
7350 return 0;
7351 if (REGNO (addr_reg) != base_regno
7352 || newoffset != offset + 4 * i)
7353 return 0;
7356 return 1;
7359 /* Return 1 for an PARALLEL suitable for stmw. */
7362 stmw_operation (op, mode)
7363 rtx op;
7364 enum machine_mode mode ATTRIBUTE_UNUSED;
7366 int count = XVECLEN (op, 0);
7367 unsigned int src_regno;
7368 rtx dest_addr;
7369 unsigned int base_regno;
7370 HOST_WIDE_INT offset;
7371 int i;
7373 /* Perform a quick check so we don't blow up below. */
7374 if (count <= 1
7375 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7376 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7377 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7378 return 0;
7380 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7381 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7383 if (src_regno > 31
7384 || count != 32 - (int) src_regno)
7385 return 0;
7387 if (legitimate_indirect_address_p (dest_addr, 0))
7389 offset = 0;
7390 base_regno = REGNO (dest_addr);
7391 if (base_regno == 0)
7392 return 0;
7394 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7396 offset = INTVAL (XEXP (dest_addr, 1));
7397 base_regno = REGNO (XEXP (dest_addr, 0));
7399 else
7400 return 0;
7402 for (i = 0; i < count; i++)
7404 rtx elt = XVECEXP (op, 0, i);
7405 rtx newaddr;
7406 rtx addr_reg;
7407 HOST_WIDE_INT newoffset;
7409 if (GET_CODE (elt) != SET
7410 || GET_CODE (SET_SRC (elt)) != REG
7411 || GET_MODE (SET_SRC (elt)) != SImode
7412 || REGNO (SET_SRC (elt)) != src_regno + i
7413 || GET_CODE (SET_DEST (elt)) != MEM
7414 || GET_MODE (SET_DEST (elt)) != SImode)
7415 return 0;
7416 newaddr = XEXP (SET_DEST (elt), 0);
7417 if (legitimate_indirect_address_p (newaddr, 0))
7419 newoffset = 0;
7420 addr_reg = newaddr;
7422 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7424 addr_reg = XEXP (newaddr, 0);
7425 newoffset = INTVAL (XEXP (newaddr, 1));
7427 else
7428 return 0;
7429 if (REGNO (addr_reg) != base_regno
7430 || newoffset != offset + 4 * i)
7431 return 0;
7434 return 1;
7437 /* A validation routine: say whether CODE, a condition code, and MODE
7438 match. The other alternatives either don't make sense or should
7439 never be generated. */
7441 static void
7442 validate_condition_mode (code, mode)
7443 enum rtx_code code;
7444 enum machine_mode mode;
7446 if (GET_RTX_CLASS (code) != '<'
7447 || GET_MODE_CLASS (mode) != MODE_CC)
7448 abort ();
7450 /* These don't make sense. */
7451 if ((code == GT || code == LT || code == GE || code == LE)
7452 && mode == CCUNSmode)
7453 abort ();
7455 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7456 && mode != CCUNSmode)
7457 abort ();
7459 if (mode != CCFPmode
7460 && (code == ORDERED || code == UNORDERED
7461 || code == UNEQ || code == LTGT
7462 || code == UNGT || code == UNLT
7463 || code == UNGE || code == UNLE))
7464 abort ();
7466 /* These should never be generated except for
7467 flag_finite_math_only. */
7468 if (mode == CCFPmode
7469 && ! flag_finite_math_only
7470 && (code == LE || code == GE
7471 || code == UNEQ || code == LTGT
7472 || code == UNGT || code == UNLT))
7473 abort ();
7475 /* These are invalid; the information is not there. */
7476 if (mode == CCEQmode
7477 && code != EQ && code != NE)
7478 abort ();
7481 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7482 We only check the opcode against the mode of the CC value here. */
7485 branch_comparison_operator (op, mode)
7486 rtx op;
7487 enum machine_mode mode ATTRIBUTE_UNUSED;
7489 enum rtx_code code = GET_CODE (op);
7490 enum machine_mode cc_mode;
7492 if (GET_RTX_CLASS (code) != '<')
7493 return 0;
7495 cc_mode = GET_MODE (XEXP (op, 0));
7496 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7497 return 0;
7499 validate_condition_mode (code, cc_mode);
7501 return 1;
7504 /* Return 1 if OP is a comparison operation that is valid for a branch
7505 insn and which is true if the corresponding bit in the CC register
7506 is set. */
7509 branch_positive_comparison_operator (op, mode)
7510 rtx op;
7511 enum machine_mode mode;
7513 enum rtx_code code;
7515 if (! branch_comparison_operator (op, mode))
7516 return 0;
7518 code = GET_CODE (op);
7519 return (code == EQ || code == LT || code == GT
7520 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7521 || code == LTU || code == GTU
7522 || code == UNORDERED);
7525 /* Return 1 if OP is a comparison operation that is valid for an scc
7526 insn: it must be a positive comparison. */
7529 scc_comparison_operator (op, mode)
7530 rtx op;
7531 enum machine_mode mode;
7533 return branch_positive_comparison_operator (op, mode);
7537 trap_comparison_operator (op, mode)
7538 rtx op;
7539 enum machine_mode mode;
7541 if (mode != VOIDmode && mode != GET_MODE (op))
7542 return 0;
7543 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7547 boolean_operator (op, mode)
7548 rtx op;
7549 enum machine_mode mode ATTRIBUTE_UNUSED;
7551 enum rtx_code code = GET_CODE (op);
7552 return (code == AND || code == IOR || code == XOR);
7556 boolean_or_operator (op, mode)
7557 rtx op;
7558 enum machine_mode mode ATTRIBUTE_UNUSED;
7560 enum rtx_code code = GET_CODE (op);
7561 return (code == IOR || code == XOR);
7565 min_max_operator (op, mode)
7566 rtx op;
7567 enum machine_mode mode ATTRIBUTE_UNUSED;
7569 enum rtx_code code = GET_CODE (op);
7570 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7573 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7574 mask required to convert the result of a rotate insn into a shift
7575 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7578 includes_lshift_p (shiftop, andop)
7579 rtx shiftop;
7580 rtx andop;
7582 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7584 shift_mask <<= INTVAL (shiftop);
7586 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7589 /* Similar, but for right shift. */
7592 includes_rshift_p (shiftop, andop)
7593 rtx shiftop;
7594 rtx andop;
7596 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7598 shift_mask >>= INTVAL (shiftop);
7600 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7603 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7604 to perform a left shift. It must have exactly SHIFTOP least
7605 significant 0's, then one or more 1's, then zero or more 0's. */
7608 includes_rldic_lshift_p (shiftop, andop)
7609 rtx shiftop;
7610 rtx andop;
7612 if (GET_CODE (andop) == CONST_INT)
7614 HOST_WIDE_INT c, lsb, shift_mask;
7616 c = INTVAL (andop);
7617 if (c == 0 || c == ~0)
7618 return 0;
7620 shift_mask = ~0;
7621 shift_mask <<= INTVAL (shiftop);
7623 /* Find the least significant one bit. */
7624 lsb = c & -c;
7626 /* It must coincide with the LSB of the shift mask. */
7627 if (-lsb != shift_mask)
7628 return 0;
7630 /* Invert to look for the next transition (if any). */
7631 c = ~c;
7633 /* Remove the low group of ones (originally low group of zeros). */
7634 c &= -lsb;
7636 /* Again find the lsb, and check we have all 1's above. */
7637 lsb = c & -c;
7638 return c == -lsb;
7640 else if (GET_CODE (andop) == CONST_DOUBLE
7641 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7643 HOST_WIDE_INT low, high, lsb;
7644 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7646 low = CONST_DOUBLE_LOW (andop);
7647 if (HOST_BITS_PER_WIDE_INT < 64)
7648 high = CONST_DOUBLE_HIGH (andop);
7650 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7651 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7652 return 0;
7654 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7656 shift_mask_high = ~0;
7657 if (INTVAL (shiftop) > 32)
7658 shift_mask_high <<= INTVAL (shiftop) - 32;
7660 lsb = high & -high;
7662 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7663 return 0;
7665 high = ~high;
7666 high &= -lsb;
7668 lsb = high & -high;
7669 return high == -lsb;
7672 shift_mask_low = ~0;
7673 shift_mask_low <<= INTVAL (shiftop);
7675 lsb = low & -low;
7677 if (-lsb != shift_mask_low)
7678 return 0;
7680 if (HOST_BITS_PER_WIDE_INT < 64)
7681 high = ~high;
7682 low = ~low;
7683 low &= -lsb;
7685 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7687 lsb = high & -high;
7688 return high == -lsb;
7691 lsb = low & -low;
7692 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7694 else
7695 return 0;
7698 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7699 to perform a left shift. It must have SHIFTOP or more least
7700 signifigant 0's, with the remainder of the word 1's. */
7703 includes_rldicr_lshift_p (shiftop, andop)
7704 rtx shiftop;
7705 rtx andop;
7707 if (GET_CODE (andop) == CONST_INT)
7709 HOST_WIDE_INT c, lsb, shift_mask;
7711 shift_mask = ~0;
7712 shift_mask <<= INTVAL (shiftop);
7713 c = INTVAL (andop);
7715 /* Find the least signifigant one bit. */
7716 lsb = c & -c;
7718 /* It must be covered by the shift mask.
7719 This test also rejects c == 0. */
7720 if ((lsb & shift_mask) == 0)
7721 return 0;
7723 /* Check we have all 1's above the transition, and reject all 1's. */
7724 return c == -lsb && lsb != 1;
7726 else if (GET_CODE (andop) == CONST_DOUBLE
7727 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7729 HOST_WIDE_INT low, lsb, shift_mask_low;
7731 low = CONST_DOUBLE_LOW (andop);
7733 if (HOST_BITS_PER_WIDE_INT < 64)
7735 HOST_WIDE_INT high, shift_mask_high;
7737 high = CONST_DOUBLE_HIGH (andop);
7739 if (low == 0)
7741 shift_mask_high = ~0;
7742 if (INTVAL (shiftop) > 32)
7743 shift_mask_high <<= INTVAL (shiftop) - 32;
7745 lsb = high & -high;
7747 if ((lsb & shift_mask_high) == 0)
7748 return 0;
7750 return high == -lsb;
7752 if (high != ~0)
7753 return 0;
7756 shift_mask_low = ~0;
7757 shift_mask_low <<= INTVAL (shiftop);
7759 lsb = low & -low;
7761 if ((lsb & shift_mask_low) == 0)
7762 return 0;
7764 return low == -lsb && lsb != 1;
7766 else
7767 return 0;
7770 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7771 for lfq and stfq insns.
7773 Note reg1 and reg2 *must* be hard registers. To be sure we will
7774 abort if we are passed pseudo registers. */
7777 registers_ok_for_quad_peep (reg1, reg2)
7778 rtx reg1, reg2;
7780 /* We might have been passed a SUBREG. */
7781 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7782 return 0;
7784 return (REGNO (reg1) == REGNO (reg2) - 1);
7787 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7788 addr1 and addr2 must be in consecutive memory locations
7789 (addr2 == addr1 + 8). */
7792 addrs_ok_for_quad_peep (addr1, addr2)
7793 rtx addr1;
7794 rtx addr2;
7796 unsigned int reg1;
7797 int offset1;
7799 /* Extract an offset (if used) from the first addr. */
7800 if (GET_CODE (addr1) == PLUS)
7802 /* If not a REG, return zero. */
7803 if (GET_CODE (XEXP (addr1, 0)) != REG)
7804 return 0;
7805 else
7807 reg1 = REGNO (XEXP (addr1, 0));
7808 /* The offset must be constant! */
7809 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7810 return 0;
7811 offset1 = INTVAL (XEXP (addr1, 1));
7814 else if (GET_CODE (addr1) != REG)
7815 return 0;
7816 else
7818 reg1 = REGNO (addr1);
7819 /* This was a simple (mem (reg)) expression. Offset is 0. */
7820 offset1 = 0;
7823 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7824 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7825 register as addr1. */
7826 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7827 return 1;
7828 if (GET_CODE (addr2) != PLUS)
7829 return 0;
7831 if (GET_CODE (XEXP (addr2, 0)) != REG
7832 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7833 return 0;
7835 if (reg1 != REGNO (XEXP (addr2, 0)))
7836 return 0;
7838 /* The offset for the second addr must be 8 more than the first addr. */
7839 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7840 return 0;
7842 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7843 instructions. */
7844 return 1;
7847 /* Return the register class of a scratch register needed to copy IN into
7848 or out of a register in CLASS in MODE. If it can be done directly,
7849 NO_REGS is returned. */
7851 enum reg_class
7852 secondary_reload_class (class, mode, in)
7853 enum reg_class class;
7854 enum machine_mode mode ATTRIBUTE_UNUSED;
7855 rtx in;
7857 int regno;
7859 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7860 #if TARGET_MACHO
7861 && MACHOPIC_INDIRECT
7862 #endif
7865 /* We cannot copy a symbolic operand directly into anything
7866 other than BASE_REGS for TARGET_ELF. So indicate that a
7867 register from BASE_REGS is needed as an intermediate
7868 register.
7870 On Darwin, pic addresses require a load from memory, which
7871 needs a base register. */
7872 if (class != BASE_REGS
7873 && (GET_CODE (in) == SYMBOL_REF
7874 || GET_CODE (in) == HIGH
7875 || GET_CODE (in) == LABEL_REF
7876 || GET_CODE (in) == CONST))
7877 return BASE_REGS;
7880 if (GET_CODE (in) == REG)
7882 regno = REGNO (in);
7883 if (regno >= FIRST_PSEUDO_REGISTER)
7885 regno = true_regnum (in);
7886 if (regno >= FIRST_PSEUDO_REGISTER)
7887 regno = -1;
7890 else if (GET_CODE (in) == SUBREG)
7892 regno = true_regnum (in);
7893 if (regno >= FIRST_PSEUDO_REGISTER)
7894 regno = -1;
7896 else
7897 regno = -1;
7899 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7900 into anything. */
7901 if (class == GENERAL_REGS || class == BASE_REGS
7902 || (regno >= 0 && INT_REGNO_P (regno)))
7903 return NO_REGS;
7905 /* Constants, memory, and FP registers can go into FP registers. */
7906 if ((regno == -1 || FP_REGNO_P (regno))
7907 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7908 return NO_REGS;
7910 /* Memory, and AltiVec registers can go into AltiVec registers. */
7911 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7912 && class == ALTIVEC_REGS)
7913 return NO_REGS;
7915 /* We can copy among the CR registers. */
7916 if ((class == CR_REGS || class == CR0_REGS)
7917 && regno >= 0 && CR_REGNO_P (regno))
7918 return NO_REGS;
7920 /* Otherwise, we need GENERAL_REGS. */
7921 return GENERAL_REGS;
7924 /* Given a comparison operation, return the bit number in CCR to test. We
7925 know this is a valid comparison.
7927 SCC_P is 1 if this is for an scc. That means that %D will have been
7928 used instead of %C, so the bits will be in different places.
7930 Return -1 if OP isn't a valid comparison for some reason. */
7933 ccr_bit (op, scc_p)
7934 rtx op;
7935 int scc_p;
7937 enum rtx_code code = GET_CODE (op);
7938 enum machine_mode cc_mode;
7939 int cc_regnum;
7940 int base_bit;
7941 rtx reg;
7943 if (GET_RTX_CLASS (code) != '<')
7944 return -1;
7946 reg = XEXP (op, 0);
7948 if (GET_CODE (reg) != REG
7949 || ! CR_REGNO_P (REGNO (reg)))
7950 abort ();
7952 cc_mode = GET_MODE (reg);
7953 cc_regnum = REGNO (reg);
7954 base_bit = 4 * (cc_regnum - CR0_REGNO);
7956 validate_condition_mode (code, cc_mode);
7958 /* When generating a sCOND operation, only positive conditions are
7959 allowed. */
7960 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
7961 && code != GTU && code != LTU)
7962 abort ();
7964 switch (code)
7966 case NE:
7967 if (TARGET_E500 && !TARGET_FPRS
7968 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7969 return base_bit + 1;
7970 return scc_p ? base_bit + 3 : base_bit + 2;
7971 case EQ:
7972 if (TARGET_E500 && !TARGET_FPRS
7973 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7974 return base_bit + 1;
7975 return base_bit + 2;
7976 case GT: case GTU: case UNLE:
7977 return base_bit + 1;
7978 case LT: case LTU: case UNGE:
7979 return base_bit;
7980 case ORDERED: case UNORDERED:
7981 return base_bit + 3;
7983 case GE: case GEU:
7984 /* If scc, we will have done a cror to put the bit in the
7985 unordered position. So test that bit. For integer, this is ! LT
7986 unless this is an scc insn. */
7987 return scc_p ? base_bit + 3 : base_bit;
7989 case LE: case LEU:
7990 return scc_p ? base_bit + 3 : base_bit + 1;
7992 default:
7993 abort ();
7997 /* Return the GOT register. */
7999 struct rtx_def *
8000 rs6000_got_register (value)
8001 rtx value ATTRIBUTE_UNUSED;
8003 /* The second flow pass currently (June 1999) can't update
8004 regs_ever_live without disturbing other parts of the compiler, so
8005 update it here to make the prolog/epilogue code happy. */
8006 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8007 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8009 current_function_uses_pic_offset_table = 1;
8011 return pic_offset_table_rtx;
8014 /* Function to init struct machine_function.
8015 This will be called, via a pointer variable,
8016 from push_function_context. */
8018 static struct machine_function *
8019 rs6000_init_machine_status ()
8021 return ggc_alloc_cleared (sizeof (machine_function));
8024 /* These macros test for integers and extract the low-order bits. */
8025 #define INT_P(X) \
8026 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8027 && GET_MODE (X) == VOIDmode)
8029 #define INT_LOWPART(X) \
8030 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8033 extract_MB (op)
8034 rtx op;
8036 int i;
8037 unsigned long val = INT_LOWPART (op);
8039 /* If the high bit is zero, the value is the first 1 bit we find
8040 from the left. */
8041 if ((val & 0x80000000) == 0)
8043 if ((val & 0xffffffff) == 0)
8044 abort ();
8046 i = 1;
8047 while (((val <<= 1) & 0x80000000) == 0)
8048 ++i;
8049 return i;
8052 /* If the high bit is set and the low bit is not, or the mask is all
8053 1's, the value is zero. */
8054 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8055 return 0;
8057 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8058 from the right. */
8059 i = 31;
8060 while (((val >>= 1) & 1) != 0)
8061 --i;
8063 return i;
8067 extract_ME (op)
8068 rtx op;
8070 int i;
8071 unsigned long val = INT_LOWPART (op);
8073 /* If the low bit is zero, the value is the first 1 bit we find from
8074 the right. */
8075 if ((val & 1) == 0)
8077 if ((val & 0xffffffff) == 0)
8078 abort ();
8080 i = 30;
8081 while (((val >>= 1) & 1) == 0)
8082 --i;
8084 return i;
8087 /* If the low bit is set and the high bit is not, or the mask is all
8088 1's, the value is 31. */
8089 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8090 return 31;
8092 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8093 from the left. */
8094 i = 0;
8095 while (((val <<= 1) & 0x80000000) != 0)
8096 ++i;
8098 return i;
8101 /* Locate some local-dynamic symbol still in use by this function
8102 so that we can print its name in some tls_ld pattern. */
8104 static const char *
8105 rs6000_get_some_local_dynamic_name ()
8107 rtx insn;
8109 if (cfun->machine->some_ld_name)
8110 return cfun->machine->some_ld_name;
8112 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8113 if (INSN_P (insn)
8114 && for_each_rtx (&PATTERN (insn),
8115 rs6000_get_some_local_dynamic_name_1, 0))
8116 return cfun->machine->some_ld_name;
8118 abort ();
8121 /* Helper function for rs6000_get_some_local_dynamic_name. */
8123 static int
8124 rs6000_get_some_local_dynamic_name_1 (px, data)
8125 rtx *px;
8126 void *data ATTRIBUTE_UNUSED;
8128 rtx x = *px;
8130 if (GET_CODE (x) == SYMBOL_REF)
8132 const char *str = XSTR (x, 0);
8133 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8135 cfun->machine->some_ld_name = str;
8136 return 1;
8140 return 0;
8143 /* Print an operand. Recognize special options, documented below. */
8145 #if TARGET_ELF
8146 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8147 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8148 #else
8149 #define SMALL_DATA_RELOC "sda21"
8150 #define SMALL_DATA_REG 0
8151 #endif
8153 void
8154 print_operand (file, x, code)
8155 FILE *file;
8156 rtx x;
8157 int code;
8159 int i;
8160 HOST_WIDE_INT val;
8161 unsigned HOST_WIDE_INT uval;
8163 switch (code)
8165 case '.':
8166 /* Write out an instruction after the call which may be replaced
8167 with glue code by the loader. This depends on the AIX version. */
8168 asm_fprintf (file, RS6000_CALL_GLUE);
8169 return;
8171 /* %a is output_address. */
8173 case 'A':
8174 /* If X is a constant integer whose low-order 5 bits are zero,
8175 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8176 in the AIX assembler where "sri" with a zero shift count
8177 writes a trash instruction. */
8178 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8179 putc ('l', file);
8180 else
8181 putc ('r', file);
8182 return;
8184 case 'b':
8185 /* If constant, low-order 16 bits of constant, unsigned.
8186 Otherwise, write normally. */
8187 if (INT_P (x))
8188 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8189 else
8190 print_operand (file, x, 0);
8191 return;
8193 case 'B':
8194 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8195 for 64-bit mask direction. */
8196 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8197 return;
8199 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8200 output_operand. */
8202 case 'E':
8203 /* X is a CR register. Print the number of the EQ bit of the CR */
8204 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8205 output_operand_lossage ("invalid %%E value");
8206 else
8207 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8208 return;
8210 case 'f':
8211 /* X is a CR register. Print the shift count needed to move it
8212 to the high-order four bits. */
8213 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8214 output_operand_lossage ("invalid %%f value");
8215 else
8216 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8217 return;
8219 case 'F':
8220 /* Similar, but print the count for the rotate in the opposite
8221 direction. */
8222 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8223 output_operand_lossage ("invalid %%F value");
8224 else
8225 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8226 return;
8228 case 'G':
8229 /* X is a constant integer. If it is negative, print "m",
8230 otherwise print "z". This is to make an aze or ame insn. */
8231 if (GET_CODE (x) != CONST_INT)
8232 output_operand_lossage ("invalid %%G value");
8233 else if (INTVAL (x) >= 0)
8234 putc ('z', file);
8235 else
8236 putc ('m', file);
8237 return;
8239 case 'h':
8240 /* If constant, output low-order five bits. Otherwise, write
8241 normally. */
8242 if (INT_P (x))
8243 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8244 else
8245 print_operand (file, x, 0);
8246 return;
8248 case 'H':
8249 /* If constant, output low-order six bits. Otherwise, write
8250 normally. */
8251 if (INT_P (x))
8252 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8253 else
8254 print_operand (file, x, 0);
8255 return;
8257 case 'I':
8258 /* Print `i' if this is a constant, else nothing. */
8259 if (INT_P (x))
8260 putc ('i', file);
8261 return;
8263 case 'j':
8264 /* Write the bit number in CCR for jump. */
8265 i = ccr_bit (x, 0);
8266 if (i == -1)
8267 output_operand_lossage ("invalid %%j code");
8268 else
8269 fprintf (file, "%d", i);
8270 return;
8272 case 'J':
8273 /* Similar, but add one for shift count in rlinm for scc and pass
8274 scc flag to `ccr_bit'. */
8275 i = ccr_bit (x, 1);
8276 if (i == -1)
8277 output_operand_lossage ("invalid %%J code");
8278 else
8279 /* If we want bit 31, write a shift count of zero, not 32. */
8280 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8281 return;
8283 case 'k':
8284 /* X must be a constant. Write the 1's complement of the
8285 constant. */
8286 if (! INT_P (x))
8287 output_operand_lossage ("invalid %%k value");
8288 else
8289 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8290 return;
8292 case 'K':
8293 /* X must be a symbolic constant on ELF. Write an
8294 expression suitable for an 'addi' that adds in the low 16
8295 bits of the MEM. */
8296 if (GET_CODE (x) != CONST)
8298 print_operand_address (file, x);
8299 fputs ("@l", file);
8301 else
8303 if (GET_CODE (XEXP (x, 0)) != PLUS
8304 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8305 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8306 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8307 output_operand_lossage ("invalid %%K value");
8308 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8309 fputs ("@l", file);
8310 /* For GNU as, there must be a non-alphanumeric character
8311 between 'l' and the number. The '-' is added by
8312 print_operand() already. */
8313 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8314 fputs ("+", file);
8315 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8317 return;
8319 /* %l is output_asm_label. */
8321 case 'L':
8322 /* Write second word of DImode or DFmode reference. Works on register
8323 or non-indexed memory only. */
8324 if (GET_CODE (x) == REG)
8325 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8326 else if (GET_CODE (x) == MEM)
8328 /* Handle possible auto-increment. Since it is pre-increment and
8329 we have already done it, we can just use an offset of word. */
8330 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8331 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8332 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8333 UNITS_PER_WORD));
8334 else
8335 output_address (XEXP (adjust_address_nv (x, SImode,
8336 UNITS_PER_WORD),
8337 0));
8339 if (small_data_operand (x, GET_MODE (x)))
8340 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8341 reg_names[SMALL_DATA_REG]);
8343 return;
8345 case 'm':
8346 /* MB value for a mask operand. */
8347 if (! mask_operand (x, SImode))
8348 output_operand_lossage ("invalid %%m value");
8350 fprintf (file, "%d", extract_MB (x));
8351 return;
8353 case 'M':
8354 /* ME value for a mask operand. */
8355 if (! mask_operand (x, SImode))
8356 output_operand_lossage ("invalid %%M value");
8358 fprintf (file, "%d", extract_ME (x));
8359 return;
8361 /* %n outputs the negative of its operand. */
8363 case 'N':
8364 /* Write the number of elements in the vector times 4. */
8365 if (GET_CODE (x) != PARALLEL)
8366 output_operand_lossage ("invalid %%N value");
8367 else
8368 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8369 return;
8371 case 'O':
8372 /* Similar, but subtract 1 first. */
8373 if (GET_CODE (x) != PARALLEL)
8374 output_operand_lossage ("invalid %%O value");
8375 else
8376 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8377 return;
8379 case 'p':
8380 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8381 if (! INT_P (x)
8382 || INT_LOWPART (x) < 0
8383 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8384 output_operand_lossage ("invalid %%p value");
8385 else
8386 fprintf (file, "%d", i);
8387 return;
8389 case 'P':
8390 /* The operand must be an indirect memory reference. The result
8391 is the register number. */
8392 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8393 || REGNO (XEXP (x, 0)) >= 32)
8394 output_operand_lossage ("invalid %%P value");
8395 else
8396 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8397 return;
8399 case 'q':
8400 /* This outputs the logical code corresponding to a boolean
8401 expression. The expression may have one or both operands
8402 negated (if one, only the first one). For condition register
8403 logical operations, it will also treat the negated
8404 CR codes as NOTs, but not handle NOTs of them. */
8406 const char *const *t = 0;
8407 const char *s;
8408 enum rtx_code code = GET_CODE (x);
8409 static const char * const tbl[3][3] = {
8410 { "and", "andc", "nor" },
8411 { "or", "orc", "nand" },
8412 { "xor", "eqv", "xor" } };
8414 if (code == AND)
8415 t = tbl[0];
8416 else if (code == IOR)
8417 t = tbl[1];
8418 else if (code == XOR)
8419 t = tbl[2];
8420 else
8421 output_operand_lossage ("invalid %%q value");
8423 if (GET_CODE (XEXP (x, 0)) != NOT)
8424 s = t[0];
8425 else
8427 if (GET_CODE (XEXP (x, 1)) == NOT)
8428 s = t[2];
8429 else
8430 s = t[1];
8433 fputs (s, file);
8435 return;
8437 case 'R':
8438 /* X is a CR register. Print the mask for `mtcrf'. */
8439 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8440 output_operand_lossage ("invalid %%R value");
8441 else
8442 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8443 return;
8445 case 's':
8446 /* Low 5 bits of 32 - value */
8447 if (! INT_P (x))
8448 output_operand_lossage ("invalid %%s value");
8449 else
8450 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8451 return;
8453 case 'S':
8454 /* PowerPC64 mask position. All 0's is excluded.
8455 CONST_INT 32-bit mask is considered sign-extended so any
8456 transition must occur within the CONST_INT, not on the boundary. */
8457 if (! mask64_operand (x, DImode))
8458 output_operand_lossage ("invalid %%S value");
8460 uval = INT_LOWPART (x);
8462 if (uval & 1) /* Clear Left */
8464 #if HOST_BITS_PER_WIDE_INT > 64
8465 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8466 #endif
8467 i = 64;
8469 else /* Clear Right */
8471 uval = ~uval;
8472 #if HOST_BITS_PER_WIDE_INT > 64
8473 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8474 #endif
8475 i = 63;
8477 while (uval != 0)
8478 --i, uval >>= 1;
8479 if (i < 0)
8480 abort ();
8481 fprintf (file, "%d", i);
8482 return;
8484 case 't':
8485 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8486 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8487 abort ();
8489 /* Bit 3 is OV bit. */
8490 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8492 /* If we want bit 31, write a shift count of zero, not 32. */
8493 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8494 return;
8496 case 'T':
8497 /* Print the symbolic name of a branch target register. */
8498 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8499 && REGNO (x) != COUNT_REGISTER_REGNUM))
8500 output_operand_lossage ("invalid %%T value");
8501 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8502 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8503 else
8504 fputs ("ctr", file);
8505 return;
8507 case 'u':
8508 /* High-order 16 bits of constant for use in unsigned operand. */
8509 if (! INT_P (x))
8510 output_operand_lossage ("invalid %%u value");
8511 else
8512 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8513 (INT_LOWPART (x) >> 16) & 0xffff);
8514 return;
8516 case 'v':
8517 /* High-order 16 bits of constant for use in signed operand. */
8518 if (! INT_P (x))
8519 output_operand_lossage ("invalid %%v value");
8520 else
8521 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8522 (INT_LOWPART (x) >> 16) & 0xffff);
8523 return;
8525 case 'U':
8526 /* Print `u' if this has an auto-increment or auto-decrement. */
8527 if (GET_CODE (x) == MEM
8528 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8529 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8530 putc ('u', file);
8531 return;
8533 case 'V':
8534 /* Print the trap code for this operand. */
8535 switch (GET_CODE (x))
8537 case EQ:
8538 fputs ("eq", file); /* 4 */
8539 break;
8540 case NE:
8541 fputs ("ne", file); /* 24 */
8542 break;
8543 case LT:
8544 fputs ("lt", file); /* 16 */
8545 break;
8546 case LE:
8547 fputs ("le", file); /* 20 */
8548 break;
8549 case GT:
8550 fputs ("gt", file); /* 8 */
8551 break;
8552 case GE:
8553 fputs ("ge", file); /* 12 */
8554 break;
8555 case LTU:
8556 fputs ("llt", file); /* 2 */
8557 break;
8558 case LEU:
8559 fputs ("lle", file); /* 6 */
8560 break;
8561 case GTU:
8562 fputs ("lgt", file); /* 1 */
8563 break;
8564 case GEU:
8565 fputs ("lge", file); /* 5 */
8566 break;
8567 default:
8568 abort ();
8570 break;
8572 case 'w':
8573 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8574 normally. */
8575 if (INT_P (x))
8576 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8577 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8578 else
8579 print_operand (file, x, 0);
8580 return;
8582 case 'W':
8583 /* MB value for a PowerPC64 rldic operand. */
8584 val = (GET_CODE (x) == CONST_INT
8585 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8587 if (val < 0)
8588 i = -1;
8589 else
8590 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8591 if ((val <<= 1) < 0)
8592 break;
8594 #if HOST_BITS_PER_WIDE_INT == 32
8595 if (GET_CODE (x) == CONST_INT && i >= 0)
8596 i += 32; /* zero-extend high-part was all 0's */
8597 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8599 val = CONST_DOUBLE_LOW (x);
8601 if (val == 0)
8602 abort ();
8603 else if (val < 0)
8604 --i;
8605 else
8606 for ( ; i < 64; i++)
8607 if ((val <<= 1) < 0)
8608 break;
8610 #endif
8612 fprintf (file, "%d", i + 1);
8613 return;
8615 case 'X':
8616 if (GET_CODE (x) == MEM
8617 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8618 putc ('x', file);
8619 return;
8621 case 'Y':
8622 /* Like 'L', for third word of TImode */
8623 if (GET_CODE (x) == REG)
8624 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8625 else if (GET_CODE (x) == MEM)
8627 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8628 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8629 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8630 else
8631 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8632 if (small_data_operand (x, GET_MODE (x)))
8633 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8634 reg_names[SMALL_DATA_REG]);
8636 return;
8638 case 'z':
8639 /* X is a SYMBOL_REF. Write out the name preceded by a
8640 period and without any trailing data in brackets. Used for function
8641 names. If we are configured for System V (or the embedded ABI) on
8642 the PowerPC, do not emit the period, since those systems do not use
8643 TOCs and the like. */
8644 if (GET_CODE (x) != SYMBOL_REF)
8645 abort ();
8647 if (XSTR (x, 0)[0] != '.')
8649 switch (DEFAULT_ABI)
8651 default:
8652 abort ();
8654 case ABI_AIX:
8655 putc ('.', file);
8656 break;
8658 case ABI_V4:
8659 case ABI_DARWIN:
8660 break;
8663 #if TARGET_AIX
8664 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8665 #else
8666 assemble_name (file, XSTR (x, 0));
8667 #endif
8668 return;
8670 case 'Z':
8671 /* Like 'L', for last word of TImode. */
8672 if (GET_CODE (x) == REG)
8673 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8674 else if (GET_CODE (x) == MEM)
8676 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8677 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8678 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8679 else
8680 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8681 if (small_data_operand (x, GET_MODE (x)))
8682 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8683 reg_names[SMALL_DATA_REG]);
8685 return;
8687 /* Print AltiVec or SPE memory operand. */
8688 case 'y':
8690 rtx tmp;
8692 if (GET_CODE (x) != MEM)
8693 abort ();
8695 tmp = XEXP (x, 0);
8697 if (TARGET_E500)
8699 /* Handle [reg]. */
8700 if (GET_CODE (tmp) == REG)
8702 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8703 break;
8705 /* Handle [reg+UIMM]. */
8706 else if (GET_CODE (tmp) == PLUS &&
8707 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8709 int x;
8711 if (GET_CODE (XEXP (tmp, 0)) != REG)
8712 abort ();
8714 x = INTVAL (XEXP (tmp, 1));
8715 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8716 break;
8719 /* Fall through. Must be [reg+reg]. */
8721 if (GET_CODE (tmp) == REG)
8722 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8723 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8725 if (REGNO (XEXP (tmp, 0)) == 0)
8726 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8727 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8728 else
8729 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8730 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8732 else
8733 abort ();
8734 break;
8737 case 0:
8738 if (GET_CODE (x) == REG)
8739 fprintf (file, "%s", reg_names[REGNO (x)]);
8740 else if (GET_CODE (x) == MEM)
8742 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8743 know the width from the mode. */
8744 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8745 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8746 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8747 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8748 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8749 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8750 else
8751 output_address (XEXP (x, 0));
8753 else
8754 output_addr_const (file, x);
8755 return;
8757 case '&':
8758 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8759 return;
8761 default:
8762 output_operand_lossage ("invalid %%xn code");
8766 /* Print the address of an operand. */
8768 void
8769 print_operand_address (file, x)
8770 FILE *file;
8771 rtx x;
8773 if (GET_CODE (x) == REG)
8774 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8775 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8776 || GET_CODE (x) == LABEL_REF)
8778 output_addr_const (file, x);
8779 if (small_data_operand (x, GET_MODE (x)))
8780 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8781 reg_names[SMALL_DATA_REG]);
8782 else if (TARGET_TOC)
8783 abort ();
8785 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8787 if (REGNO (XEXP (x, 0)) == 0)
8788 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8789 reg_names[ REGNO (XEXP (x, 0)) ]);
8790 else
8791 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8792 reg_names[ REGNO (XEXP (x, 1)) ]);
8794 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8795 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
8796 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
8797 #if TARGET_ELF
8798 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8799 && CONSTANT_P (XEXP (x, 1)))
8801 output_addr_const (file, XEXP (x, 1));
8802 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8804 #endif
8805 #if TARGET_MACHO
8806 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8807 && CONSTANT_P (XEXP (x, 1)))
8809 fprintf (file, "lo16(");
8810 output_addr_const (file, XEXP (x, 1));
8811 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8813 #endif
8814 else if (legitimate_constant_pool_address_p (x))
8816 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8818 rtx contains_minus = XEXP (x, 1);
8819 rtx minus, symref;
8820 const char *name;
8822 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8823 turn it into (sym) for output_addr_const. */
8824 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8825 contains_minus = XEXP (contains_minus, 0);
8827 minus = XEXP (contains_minus, 0);
8828 symref = XEXP (minus, 0);
8829 XEXP (contains_minus, 0) = symref;
8830 if (TARGET_ELF)
8832 char *newname;
8834 name = XSTR (symref, 0);
8835 newname = alloca (strlen (name) + sizeof ("@toc"));
8836 strcpy (newname, name);
8837 strcat (newname, "@toc");
8838 XSTR (symref, 0) = newname;
8840 output_addr_const (file, XEXP (x, 1));
8841 if (TARGET_ELF)
8842 XSTR (symref, 0) = name;
8843 XEXP (contains_minus, 0) = minus;
8845 else
8846 output_addr_const (file, XEXP (x, 1));
8848 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8850 else
8851 abort ();
8854 /* Target hook for assembling integer objects. The PowerPC version has
8855 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8856 is defined. It also needs to handle DI-mode objects on 64-bit
8857 targets. */
8859 static bool
8860 rs6000_assemble_integer (x, size, aligned_p)
8861 rtx x;
8862 unsigned int size;
8863 int aligned_p;
8865 #ifdef RELOCATABLE_NEEDS_FIXUP
8866 /* Special handling for SI values. */
8867 if (size == 4 && aligned_p)
8869 extern int in_toc_section PARAMS ((void));
8870 static int recurse = 0;
8872 /* For -mrelocatable, we mark all addresses that need to be fixed up
8873 in the .fixup section. */
8874 if (TARGET_RELOCATABLE
8875 && !in_toc_section ()
8876 && !in_text_section ()
8877 && !recurse
8878 && GET_CODE (x) != CONST_INT
8879 && GET_CODE (x) != CONST_DOUBLE
8880 && CONSTANT_P (x))
8882 char buf[256];
8884 recurse = 1;
8885 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8886 fixuplabelno++;
8887 ASM_OUTPUT_LABEL (asm_out_file, buf);
8888 fprintf (asm_out_file, "\t.long\t(");
8889 output_addr_const (asm_out_file, x);
8890 fprintf (asm_out_file, ")@fixup\n");
8891 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8892 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8893 fprintf (asm_out_file, "\t.long\t");
8894 assemble_name (asm_out_file, buf);
8895 fprintf (asm_out_file, "\n\t.previous\n");
8896 recurse = 0;
8897 return true;
8899 /* Remove initial .'s to turn a -mcall-aixdesc function
8900 address into the address of the descriptor, not the function
8901 itself. */
8902 else if (GET_CODE (x) == SYMBOL_REF
8903 && XSTR (x, 0)[0] == '.'
8904 && DEFAULT_ABI == ABI_AIX)
8906 const char *name = XSTR (x, 0);
8907 while (*name == '.')
8908 name++;
8910 fprintf (asm_out_file, "\t.long\t%s\n", name);
8911 return true;
8914 #endif /* RELOCATABLE_NEEDS_FIXUP */
8915 return default_assemble_integer (x, size, aligned_p);
8918 #ifdef HAVE_GAS_HIDDEN
8919 /* Emit an assembler directive to set symbol visibility for DECL to
8920 VISIBILITY_TYPE. */
8922 static void
8923 rs6000_assemble_visibility (decl, vis)
8924 tree decl;
8925 int vis;
8927 /* Functions need to have their entry point symbol visibility set as
8928 well as their descriptor symbol visibility. */
8929 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8931 static const char * const visibility_types[] = {
8932 NULL, "internal", "hidden", "protected"
8935 const char *name, *type;
8937 name = ((* targetm.strip_name_encoding)
8938 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8939 type = visibility_types[vis];
8941 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8942 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8944 else
8945 default_assemble_visibility (decl, vis);
8947 #endif
8949 enum rtx_code
8950 rs6000_reverse_condition (mode, code)
8951 enum machine_mode mode;
8952 enum rtx_code code;
8954 /* Reversal of FP compares takes care -- an ordered compare
8955 becomes an unordered compare and vice versa. */
8956 if (mode == CCFPmode
8957 && (!flag_finite_math_only
8958 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8959 || code == UNEQ || code == LTGT))
8960 return reverse_condition_maybe_unordered (code);
8961 else
8962 return reverse_condition (code);
8965 /* Generate a compare for CODE. Return a brand-new rtx that
8966 represents the result of the compare. */
8968 static rtx
8969 rs6000_generate_compare (code)
8970 enum rtx_code code;
8972 enum machine_mode comp_mode;
8973 rtx compare_result;
8975 if (rs6000_compare_fp_p)
8976 comp_mode = CCFPmode;
8977 else if (code == GTU || code == LTU
8978 || code == GEU || code == LEU)
8979 comp_mode = CCUNSmode;
8980 else
8981 comp_mode = CCmode;
8983 /* First, the compare. */
8984 compare_result = gen_reg_rtx (comp_mode);
8986 /* SPE FP compare instructions on the GPRs. Yuck! */
8987 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8988 && rs6000_compare_fp_p)
8990 rtx cmp, or1, or2, or_result, compare_result2;
8992 switch (code)
8994 case EQ:
8995 case UNEQ:
8996 case NE:
8997 case LTGT:
8998 cmp = flag_finite_math_only
8999 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9000 rs6000_compare_op1)
9001 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9002 rs6000_compare_op1);
9003 break;
9004 case GT:
9005 case GTU:
9006 case UNGT:
9007 case UNGE:
9008 case GE:
9009 case GEU:
9010 cmp = flag_finite_math_only
9011 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9012 rs6000_compare_op1)
9013 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9014 rs6000_compare_op1);
9015 break;
9016 case LT:
9017 case LTU:
9018 case UNLT:
9019 case UNLE:
9020 case LE:
9021 case LEU:
9022 cmp = flag_finite_math_only
9023 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9024 rs6000_compare_op1)
9025 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9026 rs6000_compare_op1);
9027 break;
9028 default:
9029 abort ();
9032 /* Synthesize LE and GE from LT/GT || EQ. */
9033 if (code == LE || code == GE || code == LEU || code == GEU)
9035 /* Synthesize GE/LE frome GT/LT || EQ. */
9037 emit_insn (cmp);
9039 switch (code)
9041 case LE: code = LT; break;
9042 case GE: code = GT; break;
9043 case LEU: code = LT; break;
9044 case GEU: code = GT; break;
9045 default: abort ();
9048 or1 = gen_reg_rtx (SImode);
9049 or2 = gen_reg_rtx (SImode);
9050 or_result = gen_reg_rtx (CCEQmode);
9051 compare_result2 = gen_reg_rtx (CCFPmode);
9053 /* Do the EQ. */
9054 cmp = flag_finite_math_only
9055 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9056 rs6000_compare_op1)
9057 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9058 rs6000_compare_op1);
9059 emit_insn (cmp);
9061 /* The MC8540 FP compare instructions set the CR bits
9062 differently than other PPC compare instructions. For
9063 that matter, there is no generic test instruction, but a
9064 testgt, testlt, and testeq. For a true condition, bit 2
9065 is set (x1xx) in the CR. Following the traditional CR
9066 values:
9068 LT GT EQ OV
9069 bit3 bit2 bit1 bit0
9071 ... bit 2 would be a GT CR alias, so later on we
9072 look in the GT bits for the branch instructions.
9073 However, we must be careful to emit correct RTL in
9074 the meantime, so optimizations don't get confused. */
9076 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9077 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9079 /* OR them together. */
9080 cmp = gen_rtx_SET (VOIDmode, or_result,
9081 gen_rtx_COMPARE (CCEQmode,
9082 gen_rtx_IOR (SImode, or1, or2),
9083 const_true_rtx));
9084 compare_result = or_result;
9085 code = EQ;
9087 else
9089 /* We only care about 1 bit (x1xx), so map everything to NE to
9090 maintain rtl sanity. We'll get to the right bit (x1xx) at
9091 code output time. */
9092 if (code == NE || code == LTGT)
9093 /* Do the inverse here because we have no cmpne
9094 instruction. We use the cmpeq instruction and expect
9095 to get a 0 instead. */
9096 code = EQ;
9097 else
9098 code = NE;
9101 emit_insn (cmp);
9103 else
9104 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9105 gen_rtx_COMPARE (comp_mode,
9106 rs6000_compare_op0,
9107 rs6000_compare_op1)));
9109 /* Some kinds of FP comparisons need an OR operation;
9110 under flag_finite_math_only we don't bother. */
9111 if (rs6000_compare_fp_p
9112 && ! flag_finite_math_only
9113 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9114 && (code == LE || code == GE
9115 || code == UNEQ || code == LTGT
9116 || code == UNGT || code == UNLT))
9118 enum rtx_code or1, or2;
9119 rtx or1_rtx, or2_rtx, compare2_rtx;
9120 rtx or_result = gen_reg_rtx (CCEQmode);
9122 switch (code)
9124 case LE: or1 = LT; or2 = EQ; break;
9125 case GE: or1 = GT; or2 = EQ; break;
9126 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9127 case LTGT: or1 = LT; or2 = GT; break;
9128 case UNGT: or1 = UNORDERED; or2 = GT; break;
9129 case UNLT: or1 = UNORDERED; or2 = LT; break;
9130 default: abort ();
9132 validate_condition_mode (or1, comp_mode);
9133 validate_condition_mode (or2, comp_mode);
9134 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9135 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9136 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9137 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9138 const_true_rtx);
9139 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9141 compare_result = or_result;
9142 code = EQ;
9145 validate_condition_mode (code, GET_MODE (compare_result));
9147 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9151 /* Emit the RTL for an sCOND pattern. */
9153 void
9154 rs6000_emit_sCOND (code, result)
9155 enum rtx_code code;
9156 rtx result;
9158 rtx condition_rtx;
9159 enum machine_mode op_mode;
9160 enum rtx_code cond_code;
9162 condition_rtx = rs6000_generate_compare (code);
9163 cond_code = GET_CODE (condition_rtx);
9165 if (cond_code == NE
9166 || cond_code == GE || cond_code == LE
9167 || cond_code == GEU || cond_code == LEU
9168 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9170 rtx not_result = gen_reg_rtx (CCEQmode);
9171 rtx not_op, rev_cond_rtx;
9172 enum machine_mode cc_mode;
9174 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9176 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9177 SImode, XEXP (condition_rtx, 0), const0_rtx);
9178 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9179 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9180 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9183 op_mode = GET_MODE (rs6000_compare_op0);
9184 if (op_mode == VOIDmode)
9185 op_mode = GET_MODE (rs6000_compare_op1);
9187 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9189 PUT_MODE (condition_rtx, DImode);
9190 convert_move (result, condition_rtx, 0);
9192 else
9194 PUT_MODE (condition_rtx, SImode);
9195 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9199 /* Emit a branch of kind CODE to location LOC. */
9201 void
9202 rs6000_emit_cbranch (code, loc)
9203 enum rtx_code code;
9204 rtx loc;
9206 rtx condition_rtx, loc_ref;
9208 condition_rtx = rs6000_generate_compare (code);
9209 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9210 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9211 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9212 loc_ref, pc_rtx)));
9215 /* Return the string to output a conditional branch to LABEL, which is
9216 the operand number of the label, or -1 if the branch is really a
9217 conditional return.
9219 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9220 condition code register and its mode specifies what kind of
9221 comparison we made.
9223 REVERSED is nonzero if we should reverse the sense of the comparison.
9225 INSN is the insn. */
9227 char *
9228 output_cbranch (op, label, reversed, insn)
9229 rtx op;
9230 const char * label;
9231 int reversed;
9232 rtx insn;
9234 static char string[64];
9235 enum rtx_code code = GET_CODE (op);
9236 rtx cc_reg = XEXP (op, 0);
9237 enum machine_mode mode = GET_MODE (cc_reg);
9238 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9239 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9240 int really_reversed = reversed ^ need_longbranch;
9241 char *s = string;
9242 const char *ccode;
9243 const char *pred;
9244 rtx note;
9246 validate_condition_mode (code, mode);
9248 /* Work out which way this really branches. We could use
9249 reverse_condition_maybe_unordered here always but this
9250 makes the resulting assembler clearer. */
9251 if (really_reversed)
9253 /* Reversal of FP compares takes care -- an ordered compare
9254 becomes an unordered compare and vice versa. */
9255 if (mode == CCFPmode)
9256 code = reverse_condition_maybe_unordered (code);
9257 else
9258 code = reverse_condition (code);
9261 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9263 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9264 to the GT bit. */
9265 if (code == EQ)
9266 /* Opposite of GT. */
9267 code = UNLE;
9268 else if (code == NE)
9269 code = GT;
9270 else
9271 abort ();
9274 switch (code)
9276 /* Not all of these are actually distinct opcodes, but
9277 we distinguish them for clarity of the resulting assembler. */
9278 case NE: case LTGT:
9279 ccode = "ne"; break;
9280 case EQ: case UNEQ:
9281 ccode = "eq"; break;
9282 case GE: case GEU:
9283 ccode = "ge"; break;
9284 case GT: case GTU: case UNGT:
9285 ccode = "gt"; break;
9286 case LE: case LEU:
9287 ccode = "le"; break;
9288 case LT: case LTU: case UNLT:
9289 ccode = "lt"; break;
9290 case UNORDERED: ccode = "un"; break;
9291 case ORDERED: ccode = "nu"; break;
9292 case UNGE: ccode = "nl"; break;
9293 case UNLE: ccode = "ng"; break;
9294 default:
9295 abort ();
9298 /* Maybe we have a guess as to how likely the branch is.
9299 The old mnemonics don't have a way to specify this information. */
9300 pred = "";
9301 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9302 if (note != NULL_RTX)
9304 /* PROB is the difference from 50%. */
9305 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9306 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9308 /* Only hint for highly probable/improbable branches on newer
9309 cpus as static prediction overrides processor dynamic
9310 prediction. For older cpus we may as well always hint, but
9311 assume not taken for branches that are very close to 50% as a
9312 mispredicted taken branch is more expensive than a
9313 mispredicted not-taken branch. */
9314 if (always_hint
9315 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9317 if (abs (prob) > REG_BR_PROB_BASE / 20
9318 && ((prob > 0) ^ need_longbranch))
9319 pred = "+";
9320 else
9321 pred = "-";
9325 if (label == NULL)
9326 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9327 else
9328 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9330 /* We need to escape any '%' characters in the reg_names string.
9331 Assume they'd only be the first character... */
9332 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9333 *s++ = '%';
9334 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9336 if (label != NULL)
9338 /* If the branch distance was too far, we may have to use an
9339 unconditional branch to go the distance. */
9340 if (need_longbranch)
9341 s += sprintf (s, ",$+8\n\tb %s", label);
9342 else
9343 s += sprintf (s, ",%s", label);
9346 return string;
9349 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9350 operands of the last comparison is nonzero/true, FALSE_COND if it
9351 is zero/false. Return 0 if the hardware has no such operation. */
9354 rs6000_emit_cmove (dest, op, true_cond, false_cond)
9355 rtx dest;
9356 rtx op;
9357 rtx true_cond;
9358 rtx false_cond;
9360 enum rtx_code code = GET_CODE (op);
9361 rtx op0 = rs6000_compare_op0;
9362 rtx op1 = rs6000_compare_op1;
9363 REAL_VALUE_TYPE c1;
9364 enum machine_mode compare_mode = GET_MODE (op0);
9365 enum machine_mode result_mode = GET_MODE (dest);
9366 rtx temp;
9368 /* These modes should always match. */
9369 if (GET_MODE (op1) != compare_mode
9370 /* In the isel case however, we can use a compare immediate, so
9371 op1 may be a small constant. */
9372 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9373 return 0;
9374 if (GET_MODE (true_cond) != result_mode)
9375 return 0;
9376 if (GET_MODE (false_cond) != result_mode)
9377 return 0;
9379 /* First, work out if the hardware can do this at all, or
9380 if it's too slow... */
9381 if (! rs6000_compare_fp_p)
9383 if (TARGET_ISEL)
9384 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9385 return 0;
9388 /* Eliminate half of the comparisons by switching operands, this
9389 makes the remaining code simpler. */
9390 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9391 || code == LTGT || code == LT || code == UNLE)
9393 code = reverse_condition_maybe_unordered (code);
9394 temp = true_cond;
9395 true_cond = false_cond;
9396 false_cond = temp;
9399 /* UNEQ and LTGT take four instructions for a comparison with zero,
9400 it'll probably be faster to use a branch here too. */
9401 if (code == UNEQ && HONOR_NANS (compare_mode))
9402 return 0;
9404 if (GET_CODE (op1) == CONST_DOUBLE)
9405 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9407 /* We're going to try to implement comparisons by performing
9408 a subtract, then comparing against zero. Unfortunately,
9409 Inf - Inf is NaN which is not zero, and so if we don't
9410 know that the operand is finite and the comparison
9411 would treat EQ different to UNORDERED, we can't do it. */
9412 if (HONOR_INFINITIES (compare_mode)
9413 && code != GT && code != UNGE
9414 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9415 /* Constructs of the form (a OP b ? a : b) are safe. */
9416 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9417 || (! rtx_equal_p (op0, true_cond)
9418 && ! rtx_equal_p (op1, true_cond))))
9419 return 0;
9420 /* At this point we know we can use fsel. */
9422 /* Reduce the comparison to a comparison against zero. */
9423 temp = gen_reg_rtx (compare_mode);
9424 emit_insn (gen_rtx_SET (VOIDmode, temp,
9425 gen_rtx_MINUS (compare_mode, op0, op1)));
9426 op0 = temp;
9427 op1 = CONST0_RTX (compare_mode);
9429 /* If we don't care about NaNs we can reduce some of the comparisons
9430 down to faster ones. */
9431 if (! HONOR_NANS (compare_mode))
9432 switch (code)
9434 case GT:
9435 code = LE;
9436 temp = true_cond;
9437 true_cond = false_cond;
9438 false_cond = temp;
9439 break;
9440 case UNGE:
9441 code = GE;
9442 break;
9443 case UNEQ:
9444 code = EQ;
9445 break;
9446 default:
9447 break;
9450 /* Now, reduce everything down to a GE. */
9451 switch (code)
9453 case GE:
9454 break;
9456 case LE:
9457 temp = gen_reg_rtx (compare_mode);
9458 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9459 op0 = temp;
9460 break;
9462 case ORDERED:
9463 temp = gen_reg_rtx (compare_mode);
9464 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9465 op0 = temp;
9466 break;
9468 case EQ:
9469 temp = gen_reg_rtx (compare_mode);
9470 emit_insn (gen_rtx_SET (VOIDmode, temp,
9471 gen_rtx_NEG (compare_mode,
9472 gen_rtx_ABS (compare_mode, op0))));
9473 op0 = temp;
9474 break;
9476 case UNGE:
9477 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9478 temp = gen_reg_rtx (result_mode);
9479 emit_insn (gen_rtx_SET (VOIDmode, temp,
9480 gen_rtx_IF_THEN_ELSE (result_mode,
9481 gen_rtx_GE (VOIDmode,
9482 op0, op1),
9483 true_cond, false_cond)));
9484 false_cond = true_cond;
9485 true_cond = temp;
9487 temp = gen_reg_rtx (compare_mode);
9488 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9489 op0 = temp;
9490 break;
9492 case GT:
9493 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9494 temp = gen_reg_rtx (result_mode);
9495 emit_insn (gen_rtx_SET (VOIDmode, temp,
9496 gen_rtx_IF_THEN_ELSE (result_mode,
9497 gen_rtx_GE (VOIDmode,
9498 op0, op1),
9499 true_cond, false_cond)));
9500 true_cond = false_cond;
9501 false_cond = temp;
9503 temp = gen_reg_rtx (compare_mode);
9504 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9505 op0 = temp;
9506 break;
9508 default:
9509 abort ();
9512 emit_insn (gen_rtx_SET (VOIDmode, dest,
9513 gen_rtx_IF_THEN_ELSE (result_mode,
9514 gen_rtx_GE (VOIDmode,
9515 op0, op1),
9516 true_cond, false_cond)));
9517 return 1;
9520 /* Same as above, but for ints (isel). */
9522 static int
9523 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9524 rtx dest;
9525 rtx op;
9526 rtx true_cond;
9527 rtx false_cond;
9529 rtx condition_rtx, cr;
9531 /* All isel implementations thus far are 32-bits. */
9532 if (GET_MODE (rs6000_compare_op0) != SImode)
9533 return 0;
9535 /* We still have to do the compare, because isel doesn't do a
9536 compare, it just looks at the CRx bits set by a previous compare
9537 instruction. */
9538 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9539 cr = XEXP (condition_rtx, 0);
9541 if (GET_MODE (cr) == CCmode)
9542 emit_insn (gen_isel_signed (dest, condition_rtx,
9543 true_cond, false_cond, cr));
9544 else
9545 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9546 true_cond, false_cond, cr));
9548 return 1;
9551 const char *
9552 output_isel (operands)
9553 rtx *operands;
9555 enum rtx_code code;
9557 code = GET_CODE (operands[1]);
9558 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9560 PUT_CODE (operands[1], reverse_condition (code));
9561 return "isel %0,%3,%2,%j1";
9563 else
9564 return "isel %0,%2,%3,%j1";
9567 void
9568 rs6000_emit_minmax (dest, code, op0, op1)
9569 rtx dest;
9570 enum rtx_code code;
9571 rtx op0;
9572 rtx op1;
9574 enum machine_mode mode = GET_MODE (op0);
9575 enum rtx_code c;
9576 rtx target;
9578 if (code == SMAX || code == SMIN)
9579 c = GE;
9580 else
9581 c = GEU;
9583 if (code == SMAX || code == UMAX)
9584 target = emit_conditional_move (dest, c, op0, op1, mode,
9585 op0, op1, mode, 0);
9586 else
9587 target = emit_conditional_move (dest, c, op0, op1, mode,
9588 op1, op0, mode, 0);
9589 if (target == NULL_RTX)
9590 abort ();
9591 if (target != dest)
9592 emit_move_insn (dest, target);
9595 /* This page contains routines that are used to determine what the
9596 function prologue and epilogue code will do and write them out. */
9598 /* Return the first fixed-point register that is required to be
9599 saved. 32 if none. */
9602 first_reg_to_save ()
9604 int first_reg;
9606 /* Find lowest numbered live register. */
9607 for (first_reg = 13; first_reg <= 31; first_reg++)
9608 if (regs_ever_live[first_reg]
9609 && (! call_used_regs[first_reg]
9610 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9611 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9612 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9613 break;
9615 #if TARGET_MACHO
9616 if (flag_pic
9617 && current_function_uses_pic_offset_table
9618 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9619 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9620 #endif
9622 return first_reg;
9625 /* Similar, for FP regs. */
9628 first_fp_reg_to_save ()
9630 int first_reg;
9632 /* Find lowest numbered live register. */
9633 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9634 if (regs_ever_live[first_reg])
9635 break;
9637 return first_reg;
9640 /* Similar, for AltiVec regs. */
9642 static int
9643 first_altivec_reg_to_save ()
9645 int i;
9647 /* Stack frame remains as is unless we are in AltiVec ABI. */
9648 if (! TARGET_ALTIVEC_ABI)
9649 return LAST_ALTIVEC_REGNO + 1;
9651 /* Find lowest numbered live register. */
9652 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9653 if (regs_ever_live[i])
9654 break;
9656 return i;
9659 /* Return a 32-bit mask of the AltiVec registers we need to set in
9660 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9661 the 32-bit word is 0. */
9663 static unsigned int
9664 compute_vrsave_mask ()
9666 unsigned int i, mask = 0;
9668 /* First, find out if we use _any_ altivec registers. */
9669 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9670 if (regs_ever_live[i])
9671 mask |= ALTIVEC_REG_BIT (i);
9673 if (mask == 0)
9674 return mask;
9676 /* Next, remove the argument registers from the set. These must
9677 be in the VRSAVE mask set by the caller, so we don't need to add
9678 them in again. More importantly, the mask we compute here is
9679 used to generate CLOBBERs in the set_vrsave insn, and we do not
9680 wish the argument registers to die. */
9681 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9682 mask &= ~ALTIVEC_REG_BIT (i);
9684 /* Similarly, remove the return value from the set. */
9686 bool yes = false;
9687 diddle_return_value (is_altivec_return_reg, &yes);
9688 if (yes)
9689 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9692 return mask;
9695 static void
9696 is_altivec_return_reg (reg, xyes)
9697 rtx reg;
9698 void *xyes;
9700 bool *yes = (bool *) xyes;
9701 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9702 *yes = true;
9706 /* Calculate the stack information for the current function. This is
9707 complicated by having two separate calling sequences, the AIX calling
9708 sequence and the V.4 calling sequence.
9710 AIX (and Darwin/Mac OS X) stack frames look like:
9711 32-bit 64-bit
9712 SP----> +---------------------------------------+
9713 | back chain to caller | 0 0
9714 +---------------------------------------+
9715 | saved CR | 4 8 (8-11)
9716 +---------------------------------------+
9717 | saved LR | 8 16
9718 +---------------------------------------+
9719 | reserved for compilers | 12 24
9720 +---------------------------------------+
9721 | reserved for binders | 16 32
9722 +---------------------------------------+
9723 | saved TOC pointer | 20 40
9724 +---------------------------------------+
9725 | Parameter save area (P) | 24 48
9726 +---------------------------------------+
9727 | Alloca space (A) | 24+P etc.
9728 +---------------------------------------+
9729 | Local variable space (L) | 24+P+A
9730 +---------------------------------------+
9731 | Float/int conversion temporary (X) | 24+P+A+L
9732 +---------------------------------------+
9733 | Save area for AltiVec registers (W) | 24+P+A+L+X
9734 +---------------------------------------+
9735 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9736 +---------------------------------------+
9737 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9738 +---------------------------------------+
9739 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9740 +---------------------------------------+
9741 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9742 +---------------------------------------+
9743 old SP->| back chain to caller's caller |
9744 +---------------------------------------+
9746 The required alignment for AIX configurations is two words (i.e., 8
9747 or 16 bytes).
9750 V.4 stack frames look like:
9752 SP----> +---------------------------------------+
9753 | back chain to caller | 0
9754 +---------------------------------------+
9755 | caller's saved LR | 4
9756 +---------------------------------------+
9757 | Parameter save area (P) | 8
9758 +---------------------------------------+
9759 | Alloca space (A) | 8+P
9760 +---------------------------------------+
9761 | Varargs save area (V) | 8+P+A
9762 +---------------------------------------+
9763 | Local variable space (L) | 8+P+A+V
9764 +---------------------------------------+
9765 | Float/int conversion temporary (X) | 8+P+A+V+L
9766 +---------------------------------------+
9767 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9768 +---------------------------------------+
9769 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9770 +---------------------------------------+
9771 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9772 +---------------------------------------+
9773 | SPE: area for 64-bit GP registers |
9774 +---------------------------------------+
9775 | SPE alignment padding |
9776 +---------------------------------------+
9777 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9778 +---------------------------------------+
9779 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9780 +---------------------------------------+
9781 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9782 +---------------------------------------+
9783 old SP->| back chain to caller's caller |
9784 +---------------------------------------+
9786 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9787 given. (But note below and in sysv4.h that we require only 8 and
9788 may round up the size of our stack frame anyways. The historical
9789 reason is early versions of powerpc-linux which didn't properly
9790 align the stack at program startup. A happy side-effect is that
9791 -mno-eabi libraries can be used with -meabi programs.)
9793 The EABI configuration defaults to the V.4 layout. However,
9794 the stack alignment requirements may differ. If -mno-eabi is not
9795 given, the required stack alignment is 8 bytes; if -mno-eabi is
9796 given, the required alignment is 16 bytes. (But see V.4 comment
9797 above.) */
9799 #ifndef ABI_STACK_BOUNDARY
9800 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9801 #endif
9803 rs6000_stack_t *
9804 rs6000_stack_info ()
9806 static rs6000_stack_t info, zero_info;
9807 rs6000_stack_t *info_ptr = &info;
9808 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9809 int ehrd_size;
9810 int total_raw_size;
9812 /* Zero all fields portably. */
9813 info = zero_info;
9815 if (TARGET_SPE)
9817 /* Cache value so we don't rescan instruction chain over and over. */
9818 if (cfun->machine->insn_chain_scanned_p == 0)
9820 cfun->machine->insn_chain_scanned_p = 1;
9821 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9825 /* Select which calling sequence. */
9826 info_ptr->abi = DEFAULT_ABI;
9828 /* Calculate which registers need to be saved & save area size. */
9829 info_ptr->first_gp_reg_save = first_reg_to_save ();
9830 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9831 even if it currently looks like we won't. */
9832 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9833 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9834 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9835 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9836 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9837 else
9838 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9840 /* For the SPE, we have an additional upper 32-bits on each GPR.
9841 Ideally we should save the entire 64-bits only when the upper
9842 half is used in SIMD instructions. Since we only record
9843 registers live (not the size they are used in), this proves
9844 difficult because we'd have to traverse the instruction chain at
9845 the right time, taking reload into account. This is a real pain,
9846 so we opt to save the GPRs in 64-bits always if but one register
9847 gets used in 64-bits. Otherwise, all the registers in the frame
9848 get saved in 32-bits.
9850 So... since when we save all GPRs (except the SP) in 64-bits, the
9851 traditional GP save area will be empty. */
9852 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9853 info_ptr->gp_size = 0;
9855 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9856 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9858 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9859 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9860 - info_ptr->first_altivec_reg_save);
9862 /* Does this function call anything? */
9863 info_ptr->calls_p = (! current_function_is_leaf
9864 || cfun->machine->ra_needs_full_frame);
9866 /* Determine if we need to save the link register. */
9867 if (rs6000_ra_ever_killed ()
9868 || (DEFAULT_ABI == ABI_AIX
9869 && current_function_profile
9870 && !TARGET_PROFILE_KERNEL)
9871 #ifdef TARGET_RELOCATABLE
9872 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9873 #endif
9874 || (info_ptr->first_fp_reg_save != 64
9875 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9876 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9877 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9878 || (DEFAULT_ABI == ABI_DARWIN
9879 && flag_pic
9880 && current_function_uses_pic_offset_table)
9881 || info_ptr->calls_p)
9883 info_ptr->lr_save_p = 1;
9884 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9887 /* Determine if we need to save the condition code registers. */
9888 if (regs_ever_live[CR2_REGNO]
9889 || regs_ever_live[CR3_REGNO]
9890 || regs_ever_live[CR4_REGNO])
9892 info_ptr->cr_save_p = 1;
9893 if (DEFAULT_ABI == ABI_V4)
9894 info_ptr->cr_size = reg_size;
9897 /* If the current function calls __builtin_eh_return, then we need
9898 to allocate stack space for registers that will hold data for
9899 the exception handler. */
9900 if (current_function_calls_eh_return)
9902 unsigned int i;
9903 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9904 continue;
9906 /* SPE saves EH registers in 64-bits. */
9907 ehrd_size = i * (TARGET_SPE_ABI
9908 && info_ptr->spe_64bit_regs_used != 0
9909 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9911 else
9912 ehrd_size = 0;
9914 /* Determine various sizes. */
9915 info_ptr->reg_size = reg_size;
9916 info_ptr->fixed_size = RS6000_SAVE_AREA;
9917 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9918 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9919 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9922 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9923 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9924 else
9925 info_ptr->spe_gp_size = 0;
9927 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9929 info_ptr->vrsave_mask = compute_vrsave_mask ();
9930 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9932 else
9934 info_ptr->vrsave_mask = 0;
9935 info_ptr->vrsave_size = 0;
9938 /* Calculate the offsets. */
9939 switch (DEFAULT_ABI)
9941 case ABI_NONE:
9942 default:
9943 abort ();
9945 case ABI_AIX:
9946 case ABI_DARWIN:
9947 info_ptr->fp_save_offset = - info_ptr->fp_size;
9948 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9950 if (TARGET_ALTIVEC_ABI)
9952 info_ptr->vrsave_save_offset
9953 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9955 /* Align stack so vector save area is on a quadword boundary. */
9956 if (info_ptr->altivec_size != 0)
9957 info_ptr->altivec_padding_size
9958 = 16 - (-info_ptr->vrsave_save_offset % 16);
9959 else
9960 info_ptr->altivec_padding_size = 0;
9962 info_ptr->altivec_save_offset
9963 = info_ptr->vrsave_save_offset
9964 - info_ptr->altivec_padding_size
9965 - info_ptr->altivec_size;
9967 /* Adjust for AltiVec case. */
9968 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9970 else
9971 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9972 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9973 info_ptr->lr_save_offset = 2*reg_size;
9974 break;
9976 case ABI_V4:
9977 info_ptr->fp_save_offset = - info_ptr->fp_size;
9978 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9979 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9981 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9983 /* Align stack so SPE GPR save area is aligned on a
9984 double-word boundary. */
9985 if (info_ptr->spe_gp_size != 0)
9986 info_ptr->spe_padding_size
9987 = 8 - (-info_ptr->cr_save_offset % 8);
9988 else
9989 info_ptr->spe_padding_size = 0;
9991 info_ptr->spe_gp_save_offset
9992 = info_ptr->cr_save_offset
9993 - info_ptr->spe_padding_size
9994 - info_ptr->spe_gp_size;
9996 /* Adjust for SPE case. */
9997 info_ptr->toc_save_offset
9998 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10000 else if (TARGET_ALTIVEC_ABI)
10002 info_ptr->vrsave_save_offset
10003 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10005 /* Align stack so vector save area is on a quadword boundary. */
10006 if (info_ptr->altivec_size != 0)
10007 info_ptr->altivec_padding_size
10008 = 16 - (-info_ptr->vrsave_save_offset % 16);
10009 else
10010 info_ptr->altivec_padding_size = 0;
10012 info_ptr->altivec_save_offset
10013 = info_ptr->vrsave_save_offset
10014 - info_ptr->altivec_padding_size
10015 - info_ptr->altivec_size;
10017 /* Adjust for AltiVec case. */
10018 info_ptr->toc_save_offset
10019 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10021 else
10022 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10023 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10024 info_ptr->lr_save_offset = reg_size;
10025 break;
10028 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10029 + info_ptr->gp_size
10030 + info_ptr->altivec_size
10031 + info_ptr->altivec_padding_size
10032 + info_ptr->spe_gp_size
10033 + info_ptr->spe_padding_size
10034 + ehrd_size
10035 + info_ptr->cr_size
10036 + info_ptr->lr_size
10037 + info_ptr->vrsave_size
10038 + info_ptr->toc_size,
10039 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10040 ? 16 : 8);
10042 total_raw_size = (info_ptr->vars_size
10043 + info_ptr->parm_size
10044 + info_ptr->save_size
10045 + info_ptr->varargs_size
10046 + info_ptr->fixed_size);
10048 info_ptr->total_size =
10049 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10051 /* Determine if we need to allocate any stack frame:
10053 For AIX we need to push the stack if a frame pointer is needed
10054 (because the stack might be dynamically adjusted), if we are
10055 debugging, if we make calls, or if the sum of fp_save, gp_save,
10056 and local variables are more than the space needed to save all
10057 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10058 + 18*8 = 288 (GPR13 reserved).
10060 For V.4 we don't have the stack cushion that AIX uses, but assume
10061 that the debugger can handle stackless frames. */
10063 if (info_ptr->calls_p)
10064 info_ptr->push_p = 1;
10066 else if (DEFAULT_ABI == ABI_V4)
10067 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10069 else if (frame_pointer_needed)
10070 info_ptr->push_p = 1;
10072 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10073 info_ptr->push_p = 1;
10075 else
10076 info_ptr->push_p
10077 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10079 /* Zero offsets if we're not saving those registers. */
10080 if (info_ptr->fp_size == 0)
10081 info_ptr->fp_save_offset = 0;
10083 if (info_ptr->gp_size == 0)
10084 info_ptr->gp_save_offset = 0;
10086 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10087 info_ptr->altivec_save_offset = 0;
10089 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10090 info_ptr->vrsave_save_offset = 0;
10092 if (! TARGET_SPE_ABI
10093 || info_ptr->spe_64bit_regs_used == 0
10094 || info_ptr->spe_gp_size == 0)
10095 info_ptr->spe_gp_save_offset = 0;
10097 if (! info_ptr->lr_save_p)
10098 info_ptr->lr_save_offset = 0;
10100 if (! info_ptr->cr_save_p)
10101 info_ptr->cr_save_offset = 0;
10103 if (! info_ptr->toc_save_p)
10104 info_ptr->toc_save_offset = 0;
10106 return info_ptr;
10109 /* Return true if the current function uses any GPRs in 64-bit SIMD
10110 mode. */
10112 static bool
10113 spe_func_has_64bit_regs_p ()
10115 rtx insns, insn;
10117 /* Functions that save and restore all the call-saved registers will
10118 need to save/restore the registers in 64-bits. */
10119 if (current_function_calls_eh_return
10120 || current_function_calls_setjmp
10121 || current_function_has_nonlocal_goto)
10122 return true;
10124 insns = get_insns ();
10126 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10128 if (INSN_P (insn))
10130 rtx i;
10132 i = PATTERN (insn);
10133 if (GET_CODE (i) == SET
10134 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10135 return true;
10139 return false;
10142 void
10143 debug_stack_info (info)
10144 rs6000_stack_t *info;
10146 const char *abi_string;
10148 if (! info)
10149 info = rs6000_stack_info ();
10151 fprintf (stderr, "\nStack information for function %s:\n",
10152 ((current_function_decl && DECL_NAME (current_function_decl))
10153 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10154 : "<unknown>"));
10156 switch (info->abi)
10158 default: abi_string = "Unknown"; break;
10159 case ABI_NONE: abi_string = "NONE"; break;
10160 case ABI_AIX: abi_string = "AIX"; break;
10161 case ABI_DARWIN: abi_string = "Darwin"; break;
10162 case ABI_V4: abi_string = "V.4"; break;
10165 fprintf (stderr, "\tABI = %5s\n", abi_string);
10167 if (TARGET_ALTIVEC_ABI)
10168 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10170 if (TARGET_SPE_ABI)
10171 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10173 if (info->first_gp_reg_save != 32)
10174 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10176 if (info->first_fp_reg_save != 64)
10177 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10179 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10180 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10181 info->first_altivec_reg_save);
10183 if (info->lr_save_p)
10184 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10186 if (info->cr_save_p)
10187 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10189 if (info->toc_save_p)
10190 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10192 if (info->vrsave_mask)
10193 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10195 if (info->push_p)
10196 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10198 if (info->calls_p)
10199 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10201 if (info->gp_save_offset)
10202 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10204 if (info->fp_save_offset)
10205 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10207 if (info->altivec_save_offset)
10208 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10209 info->altivec_save_offset);
10211 if (info->spe_gp_save_offset)
10212 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10213 info->spe_gp_save_offset);
10215 if (info->vrsave_save_offset)
10216 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10217 info->vrsave_save_offset);
10219 if (info->lr_save_offset)
10220 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10222 if (info->cr_save_offset)
10223 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10225 if (info->toc_save_offset)
10226 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10228 if (info->varargs_save_offset)
10229 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10231 if (info->total_size)
10232 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10234 if (info->varargs_size)
10235 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10237 if (info->vars_size)
10238 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10240 if (info->parm_size)
10241 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10243 if (info->fixed_size)
10244 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10246 if (info->gp_size)
10247 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10249 if (info->spe_gp_size)
10250 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10252 if (info->fp_size)
10253 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10255 if (info->altivec_size)
10256 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10258 if (info->vrsave_size)
10259 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10261 if (info->altivec_padding_size)
10262 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10263 info->altivec_padding_size);
10265 if (info->spe_padding_size)
10266 fprintf (stderr, "\tspe_padding_size = %5d\n",
10267 info->spe_padding_size);
10269 if (info->lr_size)
10270 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10272 if (info->cr_size)
10273 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10275 if (info->toc_size)
10276 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10278 if (info->save_size)
10279 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10281 if (info->reg_size != 4)
10282 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10284 fprintf (stderr, "\n");
10288 rs6000_return_addr (count, frame)
10289 int count;
10290 rtx frame;
10292 /* Currently we don't optimize very well between prolog and body
10293 code and for PIC code the code can be actually quite bad, so
10294 don't try to be too clever here. */
10295 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10297 cfun->machine->ra_needs_full_frame = 1;
10299 return
10300 gen_rtx_MEM
10301 (Pmode,
10302 memory_address
10303 (Pmode,
10304 plus_constant (copy_to_reg
10305 (gen_rtx_MEM (Pmode,
10306 memory_address (Pmode, frame))),
10307 RETURN_ADDRESS_OFFSET)));
10310 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10313 /* Say whether a function is a candidate for sibcall handling or not.
10314 We do not allow indirect calls to be optimized into sibling calls.
10315 Also, we can't do it if there are any vector parameters; there's
10316 nowhere to put the VRsave code so it works; note that functions with
10317 vector parameters are required to have a prototype, so the argument
10318 type info must be available here. (The tail recursion case can work
10319 with vector parameters, but there's no way to distinguish here.) */
10320 static bool
10321 rs6000_function_ok_for_sibcall (decl, exp)
10322 tree decl;
10323 tree exp ATTRIBUTE_UNUSED;
10325 tree type;
10326 if (decl)
10328 if (TARGET_ALTIVEC_VRSAVE)
10330 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10331 type; type = TREE_CHAIN (type))
10333 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10334 return false;
10337 if (DEFAULT_ABI == ABI_DARWIN
10338 || (*targetm.binds_local_p) (decl))
10340 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10342 if (!lookup_attribute ("longcall", attr_list)
10343 || lookup_attribute ("shortcall", attr_list))
10344 return true;
10347 return false;
10350 static int
10351 rs6000_ra_ever_killed ()
10353 rtx top;
10354 rtx reg;
10355 rtx insn;
10357 /* Irritatingly, there are two kinds of thunks -- those created with
10358 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10359 through the regular part of the compiler. This is a very hacky
10360 way to tell them apart. */
10361 if (current_function_is_thunk && !no_new_pseudos)
10362 return 0;
10364 /* regs_ever_live has LR marked as used if any sibcalls are present,
10365 but this should not force saving and restoring in the
10366 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10367 clobbers LR, so that is inappropriate. */
10369 /* Also, the prologue can generate a store into LR that
10370 doesn't really count, like this:
10372 move LR->R0
10373 bcl to set PIC register
10374 move LR->R31
10375 move R0->LR
10377 When we're called from the epilogue, we need to avoid counting
10378 this as a store. */
10380 push_topmost_sequence ();
10381 top = get_insns ();
10382 pop_topmost_sequence ();
10383 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10385 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10387 if (INSN_P (insn))
10389 if (FIND_REG_INC_NOTE (insn, reg))
10390 return 1;
10391 else if (GET_CODE (insn) == CALL_INSN
10392 && !SIBLING_CALL_P (insn))
10393 return 1;
10394 else if (set_of (reg, insn) != NULL_RTX
10395 && !prologue_epilogue_contains (insn))
10396 return 1;
10399 return 0;
10402 /* Add a REG_MAYBE_DEAD note to the insn. */
10403 static void
10404 rs6000_maybe_dead (insn)
10405 rtx insn;
10407 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10408 const0_rtx,
10409 REG_NOTES (insn));
10412 /* Emit instructions needed to load the TOC register.
10413 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10414 a constant pool; or for SVR4 -fpic. */
10416 void
10417 rs6000_emit_load_toc_table (fromprolog)
10418 int fromprolog;
10420 rtx dest, insn;
10421 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10423 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10425 rtx temp = (fromprolog
10426 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10427 : gen_reg_rtx (Pmode));
10428 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10429 if (fromprolog)
10430 rs6000_maybe_dead (insn);
10431 insn = emit_move_insn (dest, temp);
10432 if (fromprolog)
10433 rs6000_maybe_dead (insn);
10435 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10437 char buf[30];
10438 rtx tempLR = (fromprolog
10439 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10440 : gen_reg_rtx (Pmode));
10441 rtx temp0 = (fromprolog
10442 ? gen_rtx_REG (Pmode, 0)
10443 : gen_reg_rtx (Pmode));
10444 rtx symF;
10446 /* possibly create the toc section */
10447 if (! toc_initialized)
10449 toc_section ();
10450 function_section (current_function_decl);
10453 if (fromprolog)
10455 rtx symL;
10457 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10458 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10460 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10461 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10463 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10464 symF)));
10465 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10466 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10467 symL,
10468 symF)));
10470 else
10472 rtx tocsym;
10473 static int reload_toc_labelno = 0;
10475 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10477 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10478 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10480 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10481 emit_move_insn (dest, tempLR);
10482 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10484 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10485 if (fromprolog)
10486 rs6000_maybe_dead (insn);
10488 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10490 /* This is for AIX code running in non-PIC ELF32. */
10491 char buf[30];
10492 rtx realsym;
10493 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10494 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10496 insn = emit_insn (gen_elf_high (dest, realsym));
10497 if (fromprolog)
10498 rs6000_maybe_dead (insn);
10499 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10500 if (fromprolog)
10501 rs6000_maybe_dead (insn);
10503 else if (DEFAULT_ABI == ABI_AIX)
10505 if (TARGET_32BIT)
10506 insn = emit_insn (gen_load_toc_aix_si (dest));
10507 else
10508 insn = emit_insn (gen_load_toc_aix_di (dest));
10509 if (fromprolog)
10510 rs6000_maybe_dead (insn);
10512 else
10513 abort ();
10516 int
10517 get_TOC_alias_set ()
10519 static int set = -1;
10520 if (set == -1)
10521 set = new_alias_set ();
10522 return set;
10525 /* This retuns nonzero if the current function uses the TOC. This is
10526 determined by the presence of (unspec ... UNSPEC_TOC) or
10527 use (unspec ... UNSPEC_TOC), which are generated by the various
10528 load_toc_* patterns. */
10531 uses_TOC ()
10533 rtx insn;
10535 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10536 if (INSN_P (insn))
10538 rtx pat = PATTERN (insn);
10539 int i;
10541 if (GET_CODE (pat) == PARALLEL)
10542 for (i = 0; i < XVECLEN (pat, 0); i++)
10544 rtx sub = XVECEXP (pat, 0, i);
10545 if (GET_CODE (sub) == USE)
10547 sub = XEXP (sub, 0);
10548 if (GET_CODE (sub) == UNSPEC
10549 && XINT (sub, 1) == UNSPEC_TOC)
10550 return 1;
10554 return 0;
10558 create_TOC_reference (symbol)
10559 rtx symbol;
10561 return gen_rtx_PLUS (Pmode,
10562 gen_rtx_REG (Pmode, TOC_REGISTER),
10563 gen_rtx_CONST (Pmode,
10564 gen_rtx_MINUS (Pmode, symbol,
10565 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10568 #if TARGET_AIX
10569 /* __throw will restore its own return address to be the same as the
10570 return address of the function that the throw is being made to.
10571 This is unfortunate, because we want to check the original
10572 return address to see if we need to restore the TOC.
10573 So we have to squirrel it away here.
10574 This is used only in compiling __throw and __rethrow.
10576 Most of this code should be removed by CSE. */
10577 static rtx insn_after_throw;
10579 /* This does the saving... */
10580 void
10581 rs6000_aix_emit_builtin_unwind_init ()
10583 rtx mem;
10584 rtx stack_top = gen_reg_rtx (Pmode);
10585 rtx opcode_addr = gen_reg_rtx (Pmode);
10587 insn_after_throw = gen_reg_rtx (SImode);
10589 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10590 emit_move_insn (stack_top, mem);
10592 mem = gen_rtx_MEM (Pmode,
10593 gen_rtx_PLUS (Pmode, stack_top,
10594 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10595 emit_move_insn (opcode_addr, mem);
10596 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10599 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10600 in _eh.o). Only used on AIX.
10602 The idea is that on AIX, function calls look like this:
10603 bl somefunction-trampoline
10604 lwz r2,20(sp)
10606 and later,
10607 somefunction-trampoline:
10608 stw r2,20(sp)
10609 ... load function address in the count register ...
10610 bctr
10611 or like this, if the linker determines that this is not a cross-module call
10612 and so the TOC need not be restored:
10613 bl somefunction
10615 or like this, if the compiler could determine that this is not a
10616 cross-module call:
10617 bl somefunction
10618 now, the tricky bit here is that register 2 is saved and restored
10619 by the _linker_, so we can't readily generate debugging information
10620 for it. So we need to go back up the call chain looking at the
10621 insns at return addresses to see which calls saved the TOC register
10622 and so see where it gets restored from.
10624 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10625 just before the actual epilogue.
10627 On the bright side, this incurs no space or time overhead unless an
10628 exception is thrown, except for the extra code in libgcc.a.
10630 The parameter STACKSIZE is a register containing (at runtime)
10631 the amount to be popped off the stack in addition to the stack frame
10632 of this routine (which will be __throw or __rethrow, and so is
10633 guaranteed to have a stack frame). */
10635 void
10636 rs6000_emit_eh_toc_restore (stacksize)
10637 rtx stacksize;
10639 rtx top_of_stack;
10640 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10641 rtx tocompare = gen_reg_rtx (SImode);
10642 rtx opcode = gen_reg_rtx (SImode);
10643 rtx opcode_addr = gen_reg_rtx (Pmode);
10644 rtx mem;
10645 rtx loop_start = gen_label_rtx ();
10646 rtx no_toc_restore_needed = gen_label_rtx ();
10647 rtx loop_exit = gen_label_rtx ();
10649 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10650 set_mem_alias_set (mem, rs6000_sr_alias_set);
10651 emit_move_insn (bottom_of_stack, mem);
10653 top_of_stack = expand_binop (Pmode, add_optab,
10654 bottom_of_stack, stacksize,
10655 NULL_RTX, 1, OPTAB_WIDEN);
10657 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10658 : 0xE8410028, SImode));
10660 if (insn_after_throw == NULL_RTX)
10661 abort ();
10662 emit_move_insn (opcode, insn_after_throw);
10664 emit_note (NULL, NOTE_INSN_LOOP_BEG);
10665 emit_label (loop_start);
10667 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10668 SImode, NULL_RTX, NULL_RTX,
10669 no_toc_restore_needed);
10671 mem = gen_rtx_MEM (Pmode,
10672 gen_rtx_PLUS (Pmode, bottom_of_stack,
10673 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10674 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10676 emit_label (no_toc_restore_needed);
10677 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10678 Pmode, NULL_RTX, NULL_RTX,
10679 loop_exit);
10681 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10682 set_mem_alias_set (mem, rs6000_sr_alias_set);
10683 emit_move_insn (bottom_of_stack, mem);
10685 mem = gen_rtx_MEM (Pmode,
10686 gen_rtx_PLUS (Pmode, bottom_of_stack,
10687 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10688 emit_move_insn (opcode_addr, mem);
10689 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10691 emit_note (NULL, NOTE_INSN_LOOP_CONT);
10692 emit_jump (loop_start);
10693 emit_note (NULL, NOTE_INSN_LOOP_END);
10694 emit_label (loop_exit);
10696 #endif /* TARGET_AIX */
10698 /* This ties together stack memory (MEM with an alias set of
10699 rs6000_sr_alias_set) and the change to the stack pointer. */
10701 static void
10702 rs6000_emit_stack_tie ()
10704 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10706 set_mem_alias_set (mem, rs6000_sr_alias_set);
10707 emit_insn (gen_stack_tie (mem));
10710 /* Emit the correct code for allocating stack space, as insns.
10711 If COPY_R12, make sure a copy of the old frame is left in r12.
10712 The generated code may use hard register 0 as a temporary. */
10714 static void
10715 rs6000_emit_allocate_stack (size, copy_r12)
10716 HOST_WIDE_INT size;
10717 int copy_r12;
10719 rtx insn;
10720 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10721 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10722 rtx todec = GEN_INT (-size);
10724 if (current_function_limit_stack)
10726 if (REG_P (stack_limit_rtx)
10727 && REGNO (stack_limit_rtx) > 1
10728 && REGNO (stack_limit_rtx) <= 31)
10730 emit_insn (TARGET_32BIT
10731 ? gen_addsi3 (tmp_reg,
10732 stack_limit_rtx,
10733 GEN_INT (size))
10734 : gen_adddi3 (tmp_reg,
10735 stack_limit_rtx,
10736 GEN_INT (size)));
10738 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10739 const0_rtx));
10741 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10742 && TARGET_32BIT
10743 && DEFAULT_ABI == ABI_V4)
10745 rtx toload = gen_rtx_CONST (VOIDmode,
10746 gen_rtx_PLUS (Pmode,
10747 stack_limit_rtx,
10748 GEN_INT (size)));
10750 emit_insn (gen_elf_high (tmp_reg, toload));
10751 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10752 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10753 const0_rtx));
10755 else
10756 warning ("stack limit expression is not supported");
10759 if (copy_r12 || ! TARGET_UPDATE)
10760 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10762 if (TARGET_UPDATE)
10764 if (size > 32767)
10766 /* Need a note here so that try_split doesn't get confused. */
10767 if (get_last_insn() == NULL_RTX)
10768 emit_note (0, NOTE_INSN_DELETED);
10769 insn = emit_move_insn (tmp_reg, todec);
10770 try_split (PATTERN (insn), insn, 0);
10771 todec = tmp_reg;
10774 insn = emit_insn (TARGET_32BIT
10775 ? gen_movsi_update (stack_reg, stack_reg,
10776 todec, stack_reg)
10777 : gen_movdi_update (stack_reg, stack_reg,
10778 todec, stack_reg));
10780 else
10782 insn = emit_insn (TARGET_32BIT
10783 ? gen_addsi3 (stack_reg, stack_reg, todec)
10784 : gen_adddi3 (stack_reg, stack_reg, todec));
10785 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10786 gen_rtx_REG (Pmode, 12));
10789 RTX_FRAME_RELATED_P (insn) = 1;
10790 REG_NOTES (insn) =
10791 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10792 gen_rtx_SET (VOIDmode, stack_reg,
10793 gen_rtx_PLUS (Pmode, stack_reg,
10794 GEN_INT (-size))),
10795 REG_NOTES (insn));
10798 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10799 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10800 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10801 deduce these equivalences by itself so it wasn't necessary to hold
10802 its hand so much. */
10804 static void
10805 rs6000_frame_related (insn, reg, val, reg2, rreg)
10806 rtx insn;
10807 rtx reg;
10808 HOST_WIDE_INT val;
10809 rtx reg2;
10810 rtx rreg;
10812 rtx real, temp;
10814 /* copy_rtx will not make unique copies of registers, so we need to
10815 ensure we don't have unwanted sharing here. */
10816 if (reg == reg2)
10817 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10819 if (reg == rreg)
10820 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10822 real = copy_rtx (PATTERN (insn));
10824 if (reg2 != NULL_RTX)
10825 real = replace_rtx (real, reg2, rreg);
10827 real = replace_rtx (real, reg,
10828 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10829 STACK_POINTER_REGNUM),
10830 GEN_INT (val)));
10832 /* We expect that 'real' is either a SET or a PARALLEL containing
10833 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10834 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10836 if (GET_CODE (real) == SET)
10838 rtx set = real;
10840 temp = simplify_rtx (SET_SRC (set));
10841 if (temp)
10842 SET_SRC (set) = temp;
10843 temp = simplify_rtx (SET_DEST (set));
10844 if (temp)
10845 SET_DEST (set) = temp;
10846 if (GET_CODE (SET_DEST (set)) == MEM)
10848 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10849 if (temp)
10850 XEXP (SET_DEST (set), 0) = temp;
10853 else if (GET_CODE (real) == PARALLEL)
10855 int i;
10856 for (i = 0; i < XVECLEN (real, 0); i++)
10857 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10859 rtx set = XVECEXP (real, 0, i);
10861 temp = simplify_rtx (SET_SRC (set));
10862 if (temp)
10863 SET_SRC (set) = temp;
10864 temp = simplify_rtx (SET_DEST (set));
10865 if (temp)
10866 SET_DEST (set) = temp;
10867 if (GET_CODE (SET_DEST (set)) == MEM)
10869 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10870 if (temp)
10871 XEXP (SET_DEST (set), 0) = temp;
10873 RTX_FRAME_RELATED_P (set) = 1;
10876 else
10877 abort ();
10879 if (TARGET_SPE)
10880 real = spe_synthesize_frame_save (real);
10882 RTX_FRAME_RELATED_P (insn) = 1;
10883 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10884 real,
10885 REG_NOTES (insn));
10888 /* Given an SPE frame note, return a PARALLEL of SETs with the
10889 original note, plus a synthetic register save. */
10891 static rtx
10892 spe_synthesize_frame_save (real)
10893 rtx real;
10895 rtx synth, offset, reg, real2;
10897 if (GET_CODE (real) != SET
10898 || GET_MODE (SET_SRC (real)) != V2SImode)
10899 return real;
10901 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10902 frame related note. The parallel contains a set of the register
10903 being saved, and another set to a synthetic register (n+1200).
10904 This is so we can differentiate between 64-bit and 32-bit saves.
10905 Words cannot describe this nastiness. */
10907 if (GET_CODE (SET_DEST (real)) != MEM
10908 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10909 || GET_CODE (SET_SRC (real)) != REG)
10910 abort ();
10912 /* Transform:
10913 (set (mem (plus (reg x) (const y)))
10914 (reg z))
10915 into:
10916 (set (mem (plus (reg x) (const y+4)))
10917 (reg z+1200))
10920 real2 = copy_rtx (real);
10921 PUT_MODE (SET_DEST (real2), SImode);
10922 reg = SET_SRC (real2);
10923 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10924 synth = copy_rtx (real2);
10926 if (BYTES_BIG_ENDIAN)
10928 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10929 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10932 reg = SET_SRC (synth);
10934 synth = replace_rtx (synth, reg,
10935 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10937 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10938 synth = replace_rtx (synth, offset,
10939 GEN_INT (INTVAL (offset)
10940 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10942 RTX_FRAME_RELATED_P (synth) = 1;
10943 RTX_FRAME_RELATED_P (real2) = 1;
10944 if (BYTES_BIG_ENDIAN)
10945 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10946 else
10947 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10949 return real;
10952 /* Returns an insn that has a vrsave set operation with the
10953 appropriate CLOBBERs. */
10955 static rtx
10956 generate_set_vrsave (reg, info, epiloguep)
10957 rtx reg;
10958 rs6000_stack_t *info;
10959 int epiloguep;
10961 int nclobs, i;
10962 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10963 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10965 clobs[0]
10966 = gen_rtx_SET (VOIDmode,
10967 vrsave,
10968 gen_rtx_UNSPEC_VOLATILE (SImode,
10969 gen_rtvec (2, reg, vrsave),
10970 30));
10972 nclobs = 1;
10974 /* We need to clobber the registers in the mask so the scheduler
10975 does not move sets to VRSAVE before sets of AltiVec registers.
10977 However, if the function receives nonlocal gotos, reload will set
10978 all call saved registers live. We will end up with:
10980 (set (reg 999) (mem))
10981 (parallel [ (set (reg vrsave) (unspec blah))
10982 (clobber (reg 999))])
10984 The clobber will cause the store into reg 999 to be dead, and
10985 flow will attempt to delete an epilogue insn. In this case, we
10986 need an unspec use/set of the register. */
10988 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10989 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10991 if (!epiloguep || call_used_regs [i])
10992 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10993 gen_rtx_REG (V4SImode, i));
10994 else
10996 rtx reg = gen_rtx_REG (V4SImode, i);
10998 clobs[nclobs++]
10999 = gen_rtx_SET (VOIDmode,
11000 reg,
11001 gen_rtx_UNSPEC (V4SImode,
11002 gen_rtvec (1, reg), 27));
11006 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11008 for (i = 0; i < nclobs; ++i)
11009 XVECEXP (insn, 0, i) = clobs[i];
11011 return insn;
11014 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11015 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11017 static void
11018 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
11019 rtx frame_reg;
11020 rtx frame_ptr;
11021 enum machine_mode mode;
11022 unsigned int regno;
11023 int offset;
11024 int total_size;
11026 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11027 rtx replacea, replaceb;
11029 int_rtx = GEN_INT (offset);
11031 /* Some cases that need register indexed addressing. */
11032 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11033 || (TARGET_SPE_ABI
11034 && SPE_VECTOR_MODE (mode)
11035 && !SPE_CONST_OFFSET_OK (offset)))
11037 /* Whomever calls us must make sure r11 is available in the
11038 flow path of instructions in the prologue. */
11039 offset_rtx = gen_rtx_REG (Pmode, 11);
11040 emit_move_insn (offset_rtx, int_rtx);
11042 replacea = offset_rtx;
11043 replaceb = int_rtx;
11045 else
11047 offset_rtx = int_rtx;
11048 replacea = NULL_RTX;
11049 replaceb = NULL_RTX;
11052 reg = gen_rtx_REG (mode, regno);
11053 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11054 mem = gen_rtx_MEM (mode, addr);
11055 set_mem_alias_set (mem, rs6000_sr_alias_set);
11057 insn = emit_move_insn (mem, reg);
11059 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11062 /* Emit an offset memory reference suitable for a frame store, while
11063 converting to a valid addressing mode. */
11065 static rtx
11066 gen_frame_mem_offset (mode, reg, offset)
11067 enum machine_mode mode;
11068 rtx reg;
11069 int offset;
11071 rtx int_rtx, offset_rtx;
11073 int_rtx = GEN_INT (offset);
11075 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11077 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11078 emit_move_insn (offset_rtx, int_rtx);
11080 else
11081 offset_rtx = int_rtx;
11083 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11086 /* Emit function prologue as insns. */
11088 void
11089 rs6000_emit_prologue ()
11091 rs6000_stack_t *info = rs6000_stack_info ();
11092 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11093 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11094 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11095 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11096 rtx frame_reg_rtx = sp_reg_rtx;
11097 rtx cr_save_rtx = NULL;
11098 rtx insn;
11099 int saving_FPRs_inline;
11100 int using_store_multiple;
11101 HOST_WIDE_INT sp_offset = 0;
11103 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11105 reg_mode = V2SImode;
11106 reg_size = 8;
11109 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11110 && (!TARGET_SPE_ABI
11111 || info->spe_64bit_regs_used == 0)
11112 && info->first_gp_reg_save < 31);
11113 saving_FPRs_inline = (info->first_fp_reg_save == 64
11114 || FP_SAVE_INLINE (info->first_fp_reg_save));
11116 /* For V.4, update stack before we do any saving and set back pointer. */
11117 if (info->push_p && DEFAULT_ABI == ABI_V4)
11119 if (info->total_size < 32767)
11120 sp_offset = info->total_size;
11121 else
11122 frame_reg_rtx = frame_ptr_rtx;
11123 rs6000_emit_allocate_stack (info->total_size,
11124 (frame_reg_rtx != sp_reg_rtx
11125 && (info->cr_save_p
11126 || info->lr_save_p
11127 || info->first_fp_reg_save < 64
11128 || info->first_gp_reg_save < 32
11129 )));
11130 if (frame_reg_rtx != sp_reg_rtx)
11131 rs6000_emit_stack_tie ();
11134 /* Save AltiVec registers if needed. */
11135 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11137 int i;
11139 /* There should be a non inline version of this, for when we
11140 are saving lots of vector registers. */
11141 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11142 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11144 rtx areg, savereg, mem;
11145 int offset;
11147 offset = info->altivec_save_offset + sp_offset
11148 + 16 * (i - info->first_altivec_reg_save);
11150 savereg = gen_rtx_REG (V4SImode, i);
11152 areg = gen_rtx_REG (Pmode, 0);
11153 emit_move_insn (areg, GEN_INT (offset));
11155 /* AltiVec addressing mode is [reg+reg]. */
11156 mem = gen_rtx_MEM (V4SImode,
11157 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11159 set_mem_alias_set (mem, rs6000_sr_alias_set);
11161 insn = emit_move_insn (mem, savereg);
11163 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11164 areg, GEN_INT (offset));
11168 /* VRSAVE is a bit vector representing which AltiVec registers
11169 are used. The OS uses this to determine which vector
11170 registers to save on a context switch. We need to save
11171 VRSAVE on the stack frame, add whatever AltiVec registers we
11172 used in this function, and do the corresponding magic in the
11173 epilogue. */
11175 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11177 rtx reg, mem, vrsave;
11178 int offset;
11180 /* Get VRSAVE onto a GPR. */
11181 reg = gen_rtx_REG (SImode, 12);
11182 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11183 if (TARGET_MACHO)
11184 emit_insn (gen_get_vrsave_internal (reg));
11185 else
11186 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11188 /* Save VRSAVE. */
11189 offset = info->vrsave_save_offset + sp_offset;
11191 = gen_rtx_MEM (SImode,
11192 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11193 set_mem_alias_set (mem, rs6000_sr_alias_set);
11194 insn = emit_move_insn (mem, reg);
11196 /* Include the registers in the mask. */
11197 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11199 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11202 /* If we use the link register, get it into r0. */
11203 if (info->lr_save_p)
11204 emit_move_insn (gen_rtx_REG (Pmode, 0),
11205 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11207 /* If we need to save CR, put it into r12. */
11208 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11210 cr_save_rtx = gen_rtx_REG (SImode, 12);
11211 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11214 /* Do any required saving of fpr's. If only one or two to save, do
11215 it ourselves. Otherwise, call function. */
11216 if (saving_FPRs_inline)
11218 int i;
11219 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11220 if ((regs_ever_live[info->first_fp_reg_save+i]
11221 && ! call_used_regs[info->first_fp_reg_save+i]))
11222 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11223 info->first_fp_reg_save + i,
11224 info->fp_save_offset + sp_offset + 8 * i,
11225 info->total_size);
11227 else if (info->first_fp_reg_save != 64)
11229 int i;
11230 char rname[30];
11231 const char *alloc_rname;
11232 rtvec p;
11233 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11235 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11236 gen_rtx_REG (Pmode,
11237 LINK_REGISTER_REGNUM));
11238 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11239 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11240 alloc_rname = ggc_strdup (rname);
11241 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11242 gen_rtx_SYMBOL_REF (Pmode,
11243 alloc_rname));
11244 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11246 rtx addr, reg, mem;
11247 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11248 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11249 GEN_INT (info->fp_save_offset
11250 + sp_offset + 8*i));
11251 mem = gen_rtx_MEM (DFmode, addr);
11252 set_mem_alias_set (mem, rs6000_sr_alias_set);
11254 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11256 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11257 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11258 NULL_RTX, NULL_RTX);
11261 /* Save GPRs. This is done as a PARALLEL if we are using
11262 the store-multiple instructions. */
11263 if (using_store_multiple)
11265 rtvec p;
11266 int i;
11267 p = rtvec_alloc (32 - info->first_gp_reg_save);
11268 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11270 rtx addr, reg, mem;
11271 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11272 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11273 GEN_INT (info->gp_save_offset
11274 + sp_offset
11275 + reg_size * i));
11276 mem = gen_rtx_MEM (reg_mode, addr);
11277 set_mem_alias_set (mem, rs6000_sr_alias_set);
11279 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11281 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11282 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11283 NULL_RTX, NULL_RTX);
11285 else
11287 int i;
11288 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11289 if ((regs_ever_live[info->first_gp_reg_save+i]
11290 && ! call_used_regs[info->first_gp_reg_save+i])
11291 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11292 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11293 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11295 rtx addr, reg, mem;
11296 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11298 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11300 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11301 rtx b;
11303 if (!SPE_CONST_OFFSET_OK (offset))
11305 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11306 emit_move_insn (b, GEN_INT (offset));
11308 else
11309 b = GEN_INT (offset);
11311 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11312 mem = gen_rtx_MEM (V2SImode, addr);
11313 set_mem_alias_set (mem, rs6000_sr_alias_set);
11314 insn = emit_move_insn (mem, reg);
11316 if (GET_CODE (b) == CONST_INT)
11317 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11318 NULL_RTX, NULL_RTX);
11319 else
11320 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11321 b, GEN_INT (offset));
11323 else
11325 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11326 GEN_INT (info->gp_save_offset
11327 + sp_offset
11328 + reg_size * i));
11329 mem = gen_rtx_MEM (reg_mode, addr);
11330 set_mem_alias_set (mem, rs6000_sr_alias_set);
11332 insn = emit_move_insn (mem, reg);
11333 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11334 NULL_RTX, NULL_RTX);
11339 /* ??? There's no need to emit actual instructions here, but it's the
11340 easiest way to get the frame unwind information emitted. */
11341 if (current_function_calls_eh_return)
11343 unsigned int i, regno;
11345 for (i = 0; ; ++i)
11347 regno = EH_RETURN_DATA_REGNO (i);
11348 if (regno == INVALID_REGNUM)
11349 break;
11351 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11352 info->ehrd_offset + sp_offset
11353 + reg_size * (int) i,
11354 info->total_size);
11358 /* Save lr if we used it. */
11359 if (info->lr_save_p)
11361 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11362 GEN_INT (info->lr_save_offset + sp_offset));
11363 rtx reg = gen_rtx_REG (Pmode, 0);
11364 rtx mem = gen_rtx_MEM (Pmode, addr);
11365 /* This should not be of rs6000_sr_alias_set, because of
11366 __builtin_return_address. */
11368 insn = emit_move_insn (mem, reg);
11369 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11370 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11373 /* Save CR if we use any that must be preserved. */
11374 if (info->cr_save_p)
11376 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11377 GEN_INT (info->cr_save_offset + sp_offset));
11378 rtx mem = gen_rtx_MEM (SImode, addr);
11380 set_mem_alias_set (mem, rs6000_sr_alias_set);
11382 /* If r12 was used to hold the original sp, copy cr into r0 now
11383 that it's free. */
11384 if (REGNO (frame_reg_rtx) == 12)
11386 cr_save_rtx = gen_rtx_REG (SImode, 0);
11387 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11389 insn = emit_move_insn (mem, cr_save_rtx);
11391 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11392 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11393 But that's OK. All we have to do is specify that _one_ condition
11394 code register is saved in this stack slot. The thrower's epilogue
11395 will then restore all the call-saved registers.
11396 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11397 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11398 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11401 /* Update stack and set back pointer unless this is V.4,
11402 for which it was done previously. */
11403 if (info->push_p && DEFAULT_ABI != ABI_V4)
11404 rs6000_emit_allocate_stack (info->total_size, FALSE);
11406 /* Set frame pointer, if needed. */
11407 if (frame_pointer_needed)
11409 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11410 sp_reg_rtx);
11411 RTX_FRAME_RELATED_P (insn) = 1;
11414 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11415 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11416 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11417 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11419 /* If emit_load_toc_table will use the link register, we need to save
11420 it. We use R12 for this purpose because emit_load_toc_table
11421 can use register 0. This allows us to use a plain 'blr' to return
11422 from the procedure more often. */
11423 int save_LR_around_toc_setup = (TARGET_ELF
11424 && DEFAULT_ABI != ABI_AIX
11425 && flag_pic
11426 && ! info->lr_save_p
11427 && EXIT_BLOCK_PTR->pred != NULL);
11428 if (save_LR_around_toc_setup)
11430 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11431 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11432 rs6000_emit_load_toc_table (TRUE);
11433 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11435 else
11436 rs6000_emit_load_toc_table (TRUE);
11439 #if TARGET_MACHO
11440 if (DEFAULT_ABI == ABI_DARWIN
11441 && flag_pic && current_function_uses_pic_offset_table)
11443 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11444 const char *picbase = machopic_function_base_name ();
11445 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11447 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11449 rs6000_maybe_dead (
11450 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11451 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11453 #endif
11456 /* Write function prologue. */
11458 static void
11459 rs6000_output_function_prologue (file, size)
11460 FILE *file;
11461 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11463 rs6000_stack_t *info = rs6000_stack_info ();
11465 if (TARGET_DEBUG_STACK)
11466 debug_stack_info (info);
11468 /* Write .extern for any function we will call to save and restore
11469 fp values. */
11470 if (info->first_fp_reg_save < 64
11471 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11472 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11473 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11474 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11475 RESTORE_FP_SUFFIX);
11477 /* Write .extern for AIX common mode routines, if needed. */
11478 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11480 fputs ("\t.extern __mulh\n", file);
11481 fputs ("\t.extern __mull\n", file);
11482 fputs ("\t.extern __divss\n", file);
11483 fputs ("\t.extern __divus\n", file);
11484 fputs ("\t.extern __quoss\n", file);
11485 fputs ("\t.extern __quous\n", file);
11486 common_mode_defined = 1;
11489 if (! HAVE_prologue)
11491 start_sequence ();
11493 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11494 the "toplevel" insn chain. */
11495 emit_note (0, NOTE_INSN_DELETED);
11496 rs6000_emit_prologue ();
11497 emit_note (0, NOTE_INSN_DELETED);
11499 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11501 rtx insn;
11502 unsigned addr = 0;
11503 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11505 INSN_ADDRESSES_NEW (insn, addr);
11506 addr += 4;
11510 if (TARGET_DEBUG_STACK)
11511 debug_rtx_list (get_insns (), 100);
11512 final (get_insns (), file, FALSE, FALSE);
11513 end_sequence ();
11516 rs6000_pic_labelno++;
11519 /* Emit function epilogue as insns.
11521 At present, dwarf2out_frame_debug_expr doesn't understand
11522 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11523 anywhere in the epilogue. Most of the insns below would in any case
11524 need special notes to explain where r11 is in relation to the stack. */
11526 void
11527 rs6000_emit_epilogue (sibcall)
11528 int sibcall;
11530 rs6000_stack_t *info;
11531 int restoring_FPRs_inline;
11532 int using_load_multiple;
11533 int using_mfcr_multiple;
11534 int use_backchain_to_restore_sp;
11535 int sp_offset = 0;
11536 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11537 rtx frame_reg_rtx = sp_reg_rtx;
11538 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11539 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11540 int i;
11542 info = rs6000_stack_info ();
11544 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11546 reg_mode = V2SImode;
11547 reg_size = 8;
11550 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11551 && (!TARGET_SPE_ABI
11552 || info->spe_64bit_regs_used == 0)
11553 && info->first_gp_reg_save < 31);
11554 restoring_FPRs_inline = (sibcall
11555 || current_function_calls_eh_return
11556 || info->first_fp_reg_save == 64
11557 || FP_SAVE_INLINE (info->first_fp_reg_save));
11558 use_backchain_to_restore_sp = (frame_pointer_needed
11559 || current_function_calls_alloca
11560 || info->total_size > 32767);
11561 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11562 || rs6000_cpu == PROCESSOR_PPC603
11563 || rs6000_cpu == PROCESSOR_PPC750
11564 || optimize_size);
11566 /* If we have a frame pointer, a call to alloca, or a large stack
11567 frame, restore the old stack pointer using the backchain. Otherwise,
11568 we know what size to update it with. */
11569 if (use_backchain_to_restore_sp)
11571 /* Under V.4, don't reset the stack pointer until after we're done
11572 loading the saved registers. */
11573 if (DEFAULT_ABI == ABI_V4)
11574 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11576 emit_move_insn (frame_reg_rtx,
11577 gen_rtx_MEM (Pmode, sp_reg_rtx));
11580 else if (info->push_p)
11582 if (DEFAULT_ABI == ABI_V4)
11583 sp_offset = info->total_size;
11584 else
11586 emit_insn (TARGET_32BIT
11587 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11588 GEN_INT (info->total_size))
11589 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11590 GEN_INT (info->total_size)));
11594 /* Restore AltiVec registers if needed. */
11595 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11597 int i;
11599 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11600 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11602 rtx addr, areg, mem;
11604 areg = gen_rtx_REG (Pmode, 0);
11605 emit_move_insn
11606 (areg, GEN_INT (info->altivec_save_offset
11607 + sp_offset
11608 + 16 * (i - info->first_altivec_reg_save)));
11610 /* AltiVec addressing mode is [reg+reg]. */
11611 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11612 mem = gen_rtx_MEM (V4SImode, addr);
11613 set_mem_alias_set (mem, rs6000_sr_alias_set);
11615 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11619 /* Restore VRSAVE if needed. */
11620 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11622 rtx addr, mem, reg;
11624 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11625 GEN_INT (info->vrsave_save_offset + sp_offset));
11626 mem = gen_rtx_MEM (SImode, addr);
11627 set_mem_alias_set (mem, rs6000_sr_alias_set);
11628 reg = gen_rtx_REG (SImode, 12);
11629 emit_move_insn (reg, mem);
11631 emit_insn (generate_set_vrsave (reg, info, 1));
11634 /* Get the old lr if we saved it. */
11635 if (info->lr_save_p)
11637 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11638 info->lr_save_offset + sp_offset);
11640 set_mem_alias_set (mem, rs6000_sr_alias_set);
11642 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11645 /* Get the old cr if we saved it. */
11646 if (info->cr_save_p)
11648 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11649 GEN_INT (info->cr_save_offset + sp_offset));
11650 rtx mem = gen_rtx_MEM (SImode, addr);
11652 set_mem_alias_set (mem, rs6000_sr_alias_set);
11654 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11657 /* Set LR here to try to overlap restores below. */
11658 if (info->lr_save_p)
11659 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11660 gen_rtx_REG (Pmode, 0));
11662 /* Load exception handler data registers, if needed. */
11663 if (current_function_calls_eh_return)
11665 unsigned int i, regno;
11667 for (i = 0; ; ++i)
11669 rtx mem;
11671 regno = EH_RETURN_DATA_REGNO (i);
11672 if (regno == INVALID_REGNUM)
11673 break;
11675 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11676 info->ehrd_offset + sp_offset
11677 + reg_size * (int) i);
11678 set_mem_alias_set (mem, rs6000_sr_alias_set);
11680 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11684 /* Restore GPRs. This is done as a PARALLEL if we are using
11685 the load-multiple instructions. */
11686 if (using_load_multiple)
11688 rtvec p;
11689 p = rtvec_alloc (32 - info->first_gp_reg_save);
11690 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11692 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11693 GEN_INT (info->gp_save_offset
11694 + sp_offset
11695 + reg_size * i));
11696 rtx mem = gen_rtx_MEM (reg_mode, addr);
11698 set_mem_alias_set (mem, rs6000_sr_alias_set);
11700 RTVEC_ELT (p, i) =
11701 gen_rtx_SET (VOIDmode,
11702 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11703 mem);
11705 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11707 else
11708 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11709 if ((regs_ever_live[info->first_gp_reg_save+i]
11710 && ! call_used_regs[info->first_gp_reg_save+i])
11711 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11712 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11713 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11715 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11716 GEN_INT (info->gp_save_offset
11717 + sp_offset
11718 + reg_size * i));
11719 rtx mem = gen_rtx_MEM (reg_mode, addr);
11721 /* Restore 64-bit quantities for SPE. */
11722 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11724 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11725 rtx b;
11727 if (!SPE_CONST_OFFSET_OK (offset))
11729 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11730 emit_move_insn (b, GEN_INT (offset));
11732 else
11733 b = GEN_INT (offset);
11735 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11736 mem = gen_rtx_MEM (V2SImode, addr);
11739 set_mem_alias_set (mem, rs6000_sr_alias_set);
11741 emit_move_insn (gen_rtx_REG (reg_mode,
11742 info->first_gp_reg_save + i), mem);
11745 /* Restore fpr's if we need to do it without calling a function. */
11746 if (restoring_FPRs_inline)
11747 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11748 if ((regs_ever_live[info->first_fp_reg_save+i]
11749 && ! call_used_regs[info->first_fp_reg_save+i]))
11751 rtx addr, mem;
11752 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11753 GEN_INT (info->fp_save_offset
11754 + sp_offset
11755 + 8 * i));
11756 mem = gen_rtx_MEM (DFmode, addr);
11757 set_mem_alias_set (mem, rs6000_sr_alias_set);
11759 emit_move_insn (gen_rtx_REG (DFmode,
11760 info->first_fp_reg_save + i),
11761 mem);
11764 /* If we saved cr, restore it here. Just those that were used. */
11765 if (info->cr_save_p)
11767 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11768 int count = 0;
11770 if (using_mfcr_multiple)
11772 for (i = 0; i < 8; i++)
11773 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11774 count++;
11775 if (count == 0)
11776 abort ();
11779 if (using_mfcr_multiple && count > 1)
11781 rtvec p;
11782 int ndx;
11784 p = rtvec_alloc (count);
11786 ndx = 0;
11787 for (i = 0; i < 8; i++)
11788 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11790 rtvec r = rtvec_alloc (2);
11791 RTVEC_ELT (r, 0) = r12_rtx;
11792 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11793 RTVEC_ELT (p, ndx) =
11794 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11795 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11796 ndx++;
11798 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11799 if (ndx != count)
11800 abort ();
11802 else
11803 for (i = 0; i < 8; i++)
11804 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11806 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11807 CR0_REGNO+i),
11808 r12_rtx));
11812 /* If this is V.4, unwind the stack pointer after all of the loads
11813 have been done. We need to emit a block here so that sched
11814 doesn't decide to move the sp change before the register restores
11815 (which may not have any obvious dependency on the stack). This
11816 doesn't hurt performance, because there is no scheduling that can
11817 be done after this point. */
11818 if (DEFAULT_ABI == ABI_V4)
11820 if (frame_reg_rtx != sp_reg_rtx)
11821 rs6000_emit_stack_tie ();
11823 if (use_backchain_to_restore_sp)
11825 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11827 else if (sp_offset != 0)
11829 emit_insn (TARGET_32BIT
11830 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11831 GEN_INT (sp_offset))
11832 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11833 GEN_INT (sp_offset)));
11837 if (current_function_calls_eh_return)
11839 rtx sa = EH_RETURN_STACKADJ_RTX;
11840 emit_insn (TARGET_32BIT
11841 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11842 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11845 if (!sibcall)
11847 rtvec p;
11848 if (! restoring_FPRs_inline)
11849 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11850 else
11851 p = rtvec_alloc (2);
11853 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11854 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11855 gen_rtx_REG (Pmode,
11856 LINK_REGISTER_REGNUM));
11858 /* If we have to restore more than two FP registers, branch to the
11859 restore function. It will return to our caller. */
11860 if (! restoring_FPRs_inline)
11862 int i;
11863 char rname[30];
11864 const char *alloc_rname;
11866 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11867 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11868 alloc_rname = ggc_strdup (rname);
11869 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11870 gen_rtx_SYMBOL_REF (Pmode,
11871 alloc_rname));
11873 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11875 rtx addr, mem;
11876 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11877 GEN_INT (info->fp_save_offset + 8*i));
11878 mem = gen_rtx_MEM (DFmode, addr);
11879 set_mem_alias_set (mem, rs6000_sr_alias_set);
11881 RTVEC_ELT (p, i+3) =
11882 gen_rtx_SET (VOIDmode,
11883 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11884 mem);
11888 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11892 /* Write function epilogue. */
11894 static void
11895 rs6000_output_function_epilogue (file, size)
11896 FILE *file;
11897 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11899 rs6000_stack_t *info = rs6000_stack_info ();
11901 if (! HAVE_epilogue)
11903 rtx insn = get_last_insn ();
11904 /* If the last insn was a BARRIER, we don't have to write anything except
11905 the trace table. */
11906 if (GET_CODE (insn) == NOTE)
11907 insn = prev_nonnote_insn (insn);
11908 if (insn == 0 || GET_CODE (insn) != BARRIER)
11910 /* This is slightly ugly, but at least we don't have two
11911 copies of the epilogue-emitting code. */
11912 start_sequence ();
11914 /* A NOTE_INSN_DELETED is supposed to be at the start
11915 and end of the "toplevel" insn chain. */
11916 emit_note (0, NOTE_INSN_DELETED);
11917 rs6000_emit_epilogue (FALSE);
11918 emit_note (0, NOTE_INSN_DELETED);
11920 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11922 rtx insn;
11923 unsigned addr = 0;
11924 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11926 INSN_ADDRESSES_NEW (insn, addr);
11927 addr += 4;
11931 if (TARGET_DEBUG_STACK)
11932 debug_rtx_list (get_insns (), 100);
11933 final (get_insns (), file, FALSE, FALSE);
11934 end_sequence ();
11938 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11939 on its format.
11941 We don't output a traceback table if -finhibit-size-directive was
11942 used. The documentation for -finhibit-size-directive reads
11943 ``don't output a @code{.size} assembler directive, or anything
11944 else that would cause trouble if the function is split in the
11945 middle, and the two halves are placed at locations far apart in
11946 memory.'' The traceback table has this property, since it
11947 includes the offset from the start of the function to the
11948 traceback table itself.
11950 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11951 different traceback table. */
11952 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11953 && rs6000_traceback != traceback_none)
11955 const char *fname = NULL;
11956 const char *language_string = lang_hooks.name;
11957 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11958 int i;
11959 int optional_tbtab;
11961 if (rs6000_traceback == traceback_full)
11962 optional_tbtab = 1;
11963 else if (rs6000_traceback == traceback_part)
11964 optional_tbtab = 0;
11965 else
11966 optional_tbtab = !optimize_size && !TARGET_ELF;
11968 if (optional_tbtab)
11970 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11971 while (*fname == '.') /* V.4 encodes . in the name */
11972 fname++;
11974 /* Need label immediately before tbtab, so we can compute
11975 its offset from the function start. */
11976 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11977 ASM_OUTPUT_LABEL (file, fname);
11980 /* The .tbtab pseudo-op can only be used for the first eight
11981 expressions, since it can't handle the possibly variable
11982 length fields that follow. However, if you omit the optional
11983 fields, the assembler outputs zeros for all optional fields
11984 anyways, giving each variable length field is minimum length
11985 (as defined in sys/debug.h). Thus we can not use the .tbtab
11986 pseudo-op at all. */
11988 /* An all-zero word flags the start of the tbtab, for debuggers
11989 that have to find it by searching forward from the entry
11990 point or from the current pc. */
11991 fputs ("\t.long 0\n", file);
11993 /* Tbtab format type. Use format type 0. */
11994 fputs ("\t.byte 0,", file);
11996 /* Language type. Unfortunately, there doesn't seem to be any
11997 official way to get this info, so we use language_string. C
11998 is 0. C++ is 9. No number defined for Obj-C, so use the
11999 value for C for now. There is no official value for Java,
12000 although IBM appears to be using 13. There is no official value
12001 for Chill, so we've chosen 44 pseudo-randomly. */
12002 if (! strcmp (language_string, "GNU C")
12003 || ! strcmp (language_string, "GNU Objective-C"))
12004 i = 0;
12005 else if (! strcmp (language_string, "GNU F77"))
12006 i = 1;
12007 else if (! strcmp (language_string, "GNU Ada"))
12008 i = 3;
12009 else if (! strcmp (language_string, "GNU Pascal"))
12010 i = 2;
12011 else if (! strcmp (language_string, "GNU C++"))
12012 i = 9;
12013 else if (! strcmp (language_string, "GNU Java"))
12014 i = 13;
12015 else if (! strcmp (language_string, "GNU CHILL"))
12016 i = 44;
12017 else
12018 abort ();
12019 fprintf (file, "%d,", i);
12021 /* 8 single bit fields: global linkage (not set for C extern linkage,
12022 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12023 from start of procedure stored in tbtab, internal function, function
12024 has controlled storage, function has no toc, function uses fp,
12025 function logs/aborts fp operations. */
12026 /* Assume that fp operations are used if any fp reg must be saved. */
12027 fprintf (file, "%d,",
12028 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12030 /* 6 bitfields: function is interrupt handler, name present in
12031 proc table, function calls alloca, on condition directives
12032 (controls stack walks, 3 bits), saves condition reg, saves
12033 link reg. */
12034 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12035 set up as a frame pointer, even when there is no alloca call. */
12036 fprintf (file, "%d,",
12037 ((optional_tbtab << 6)
12038 | ((optional_tbtab & frame_pointer_needed) << 5)
12039 | (info->cr_save_p << 1)
12040 | (info->lr_save_p)));
12042 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12043 (6 bits). */
12044 fprintf (file, "%d,",
12045 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12047 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12048 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12050 if (optional_tbtab)
12052 /* Compute the parameter info from the function decl argument
12053 list. */
12054 tree decl;
12055 int next_parm_info_bit = 31;
12057 for (decl = DECL_ARGUMENTS (current_function_decl);
12058 decl; decl = TREE_CHAIN (decl))
12060 rtx parameter = DECL_INCOMING_RTL (decl);
12061 enum machine_mode mode = GET_MODE (parameter);
12063 if (GET_CODE (parameter) == REG)
12065 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12067 int bits;
12069 float_parms++;
12071 if (mode == SFmode)
12072 bits = 0x2;
12073 else if (mode == DFmode || mode == TFmode)
12074 bits = 0x3;
12075 else
12076 abort ();
12078 /* If only one bit will fit, don't or in this entry. */
12079 if (next_parm_info_bit > 0)
12080 parm_info |= (bits << (next_parm_info_bit - 1));
12081 next_parm_info_bit -= 2;
12083 else
12085 fixed_parms += ((GET_MODE_SIZE (mode)
12086 + (UNITS_PER_WORD - 1))
12087 / UNITS_PER_WORD);
12088 next_parm_info_bit -= 1;
12094 /* Number of fixed point parameters. */
12095 /* This is actually the number of words of fixed point parameters; thus
12096 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12097 fprintf (file, "%d,", fixed_parms);
12099 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12100 all on stack. */
12101 /* This is actually the number of fp registers that hold parameters;
12102 and thus the maximum value is 13. */
12103 /* Set parameters on stack bit if parameters are not in their original
12104 registers, regardless of whether they are on the stack? Xlc
12105 seems to set the bit when not optimizing. */
12106 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12108 if (! optional_tbtab)
12109 return;
12111 /* Optional fields follow. Some are variable length. */
12113 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12114 11 double float. */
12115 /* There is an entry for each parameter in a register, in the order that
12116 they occur in the parameter list. Any intervening arguments on the
12117 stack are ignored. If the list overflows a long (max possible length
12118 34 bits) then completely leave off all elements that don't fit. */
12119 /* Only emit this long if there was at least one parameter. */
12120 if (fixed_parms || float_parms)
12121 fprintf (file, "\t.long %d\n", parm_info);
12123 /* Offset from start of code to tb table. */
12124 fputs ("\t.long ", file);
12125 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12126 #if TARGET_AIX
12127 RS6000_OUTPUT_BASENAME (file, fname);
12128 #else
12129 assemble_name (file, fname);
12130 #endif
12131 fputs ("-.", file);
12132 #if TARGET_AIX
12133 RS6000_OUTPUT_BASENAME (file, fname);
12134 #else
12135 assemble_name (file, fname);
12136 #endif
12137 putc ('\n', file);
12139 /* Interrupt handler mask. */
12140 /* Omit this long, since we never set the interrupt handler bit
12141 above. */
12143 /* Number of CTL (controlled storage) anchors. */
12144 /* Omit this long, since the has_ctl bit is never set above. */
12146 /* Displacement into stack of each CTL anchor. */
12147 /* Omit this list of longs, because there are no CTL anchors. */
12149 /* Length of function name. */
12150 if (*fname == '*')
12151 ++fname;
12152 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12154 /* Function name. */
12155 assemble_string (fname, strlen (fname));
12157 /* Register for alloca automatic storage; this is always reg 31.
12158 Only emit this if the alloca bit was set above. */
12159 if (frame_pointer_needed)
12160 fputs ("\t.byte 31\n", file);
12162 fputs ("\t.align 2\n", file);
12166 /* A C compound statement that outputs the assembler code for a thunk
12167 function, used to implement C++ virtual function calls with
12168 multiple inheritance. The thunk acts as a wrapper around a virtual
12169 function, adjusting the implicit object parameter before handing
12170 control off to the real function.
12172 First, emit code to add the integer DELTA to the location that
12173 contains the incoming first argument. Assume that this argument
12174 contains a pointer, and is the one used to pass the `this' pointer
12175 in C++. This is the incoming argument *before* the function
12176 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12177 values of all other incoming arguments.
12179 After the addition, emit code to jump to FUNCTION, which is a
12180 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12181 not touch the return address. Hence returning from FUNCTION will
12182 return to whoever called the current `thunk'.
12184 The effect must be as if FUNCTION had been called directly with the
12185 adjusted first argument. This macro is responsible for emitting
12186 all of the code for a thunk function; output_function_prologue()
12187 and output_function_epilogue() are not invoked.
12189 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12190 been extracted from it.) It might possibly be useful on some
12191 targets, but probably not.
12193 If you do not define this macro, the target-independent code in the
12194 C++ frontend will generate a less efficient heavyweight thunk that
12195 calls FUNCTION instead of jumping to it. The generic approach does
12196 not support varargs. */
12198 static void
12199 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
12200 FILE *file;
12201 tree thunk_fndecl ATTRIBUTE_UNUSED;
12202 HOST_WIDE_INT delta;
12203 HOST_WIDE_INT vcall_offset;
12204 tree function;
12206 rtx this, insn, funexp;
12208 reload_completed = 1;
12209 no_new_pseudos = 1;
12211 /* Mark the end of the (empty) prologue. */
12212 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
12214 /* Find the "this" pointer. If the function returns a structure,
12215 the structure return pointer is in r3. */
12216 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
12217 this = gen_rtx_REG (Pmode, 4);
12218 else
12219 this = gen_rtx_REG (Pmode, 3);
12221 /* Apply the constant offset, if required. */
12222 if (delta)
12224 rtx delta_rtx = GEN_INT (delta);
12225 emit_insn (TARGET_32BIT
12226 ? gen_addsi3 (this, this, delta_rtx)
12227 : gen_adddi3 (this, this, delta_rtx));
12230 /* Apply the offset from the vtable, if required. */
12231 if (vcall_offset)
12233 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12234 rtx tmp = gen_rtx_REG (Pmode, 12);
12236 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12237 emit_insn (TARGET_32BIT
12238 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12239 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12240 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12241 emit_insn (TARGET_32BIT
12242 ? gen_addsi3 (this, this, tmp)
12243 : gen_adddi3 (this, this, tmp));
12246 /* Generate a tail call to the target function. */
12247 if (!TREE_USED (function))
12249 assemble_external (function);
12250 TREE_USED (function) = 1;
12252 funexp = XEXP (DECL_RTL (function), 0);
12253 SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
12254 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12256 #if TARGET_MACHO
12257 if (MACHOPIC_INDIRECT)
12258 funexp = machopic_indirect_call_target (funexp);
12259 #endif
12261 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12262 generate sibcall RTL explicitly to avoid constraint abort. */
12263 insn = emit_call_insn (
12264 gen_rtx_PARALLEL (VOIDmode,
12265 gen_rtvec (4,
12266 gen_rtx_CALL (VOIDmode,
12267 funexp, const0_rtx),
12268 gen_rtx_USE (VOIDmode, const0_rtx),
12269 gen_rtx_USE (VOIDmode,
12270 gen_rtx_REG (SImode,
12271 LINK_REGISTER_REGNUM)),
12272 gen_rtx_RETURN (VOIDmode))));
12273 SIBLING_CALL_P (insn) = 1;
12274 emit_barrier ();
12276 /* Run just enough of rest_of_compilation to get the insns emitted.
12277 There's not really enough bulk here to make other passes such as
12278 instruction scheduling worth while. Note that use_thunk calls
12279 assemble_start_function and assemble_end_function. */
12280 insn = get_insns ();
12281 shorten_branches (insn);
12282 final_start_function (insn, file, 1);
12283 final (insn, file, 1, 0);
12284 final_end_function ();
12286 reload_completed = 0;
12287 no_new_pseudos = 0;
12290 /* A quick summary of the various types of 'constant-pool tables'
12291 under PowerPC:
12293 Target Flags Name One table per
12294 AIX (none) AIX TOC object file
12295 AIX -mfull-toc AIX TOC object file
12296 AIX -mminimal-toc AIX minimal TOC translation unit
12297 SVR4/EABI (none) SVR4 SDATA object file
12298 SVR4/EABI -fpic SVR4 pic object file
12299 SVR4/EABI -fPIC SVR4 PIC translation unit
12300 SVR4/EABI -mrelocatable EABI TOC function
12301 SVR4/EABI -maix AIX TOC object file
12302 SVR4/EABI -maix -mminimal-toc
12303 AIX minimal TOC translation unit
12305 Name Reg. Set by entries contains:
12306 made by addrs? fp? sum?
12308 AIX TOC 2 crt0 as Y option option
12309 AIX minimal TOC 30 prolog gcc Y Y option
12310 SVR4 SDATA 13 crt0 gcc N Y N
12311 SVR4 pic 30 prolog ld Y not yet N
12312 SVR4 PIC 30 prolog gcc Y option option
12313 EABI TOC 30 prolog gcc Y option option
12317 /* Hash functions for the hash table. */
12319 static unsigned
12320 rs6000_hash_constant (k)
12321 rtx k;
12323 enum rtx_code code = GET_CODE (k);
12324 enum machine_mode mode = GET_MODE (k);
12325 unsigned result = (code << 3) ^ mode;
12326 const char *format;
12327 int flen, fidx;
12329 format = GET_RTX_FORMAT (code);
12330 flen = strlen (format);
12331 fidx = 0;
12333 switch (code)
12335 case LABEL_REF:
12336 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12338 case CONST_DOUBLE:
12339 if (mode != VOIDmode)
12340 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12341 flen = 2;
12342 break;
12344 case CODE_LABEL:
12345 fidx = 3;
12346 break;
12348 default:
12349 break;
12352 for (; fidx < flen; fidx++)
12353 switch (format[fidx])
12355 case 's':
12357 unsigned i, len;
12358 const char *str = XSTR (k, fidx);
12359 len = strlen (str);
12360 result = result * 613 + len;
12361 for (i = 0; i < len; i++)
12362 result = result * 613 + (unsigned) str[i];
12363 break;
12365 case 'u':
12366 case 'e':
12367 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12368 break;
12369 case 'i':
12370 case 'n':
12371 result = result * 613 + (unsigned) XINT (k, fidx);
12372 break;
12373 case 'w':
12374 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12375 result = result * 613 + (unsigned) XWINT (k, fidx);
12376 else
12378 size_t i;
12379 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12380 result = result * 613 + (unsigned) (XWINT (k, fidx)
12381 >> CHAR_BIT * i);
12383 break;
12384 case '0':
12385 break;
12386 default:
12387 abort ();
12390 return result;
12393 static unsigned
12394 toc_hash_function (hash_entry)
12395 const void * hash_entry;
12397 const struct toc_hash_struct *thc =
12398 (const struct toc_hash_struct *) hash_entry;
12399 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12402 /* Compare H1 and H2 for equivalence. */
12404 static int
12405 toc_hash_eq (h1, h2)
12406 const void * h1;
12407 const void * h2;
12409 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12410 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12412 if (((const struct toc_hash_struct *) h1)->key_mode
12413 != ((const struct toc_hash_struct *) h2)->key_mode)
12414 return 0;
12416 return rtx_equal_p (r1, r2);
12419 /* These are the names given by the C++ front-end to vtables, and
12420 vtable-like objects. Ideally, this logic should not be here;
12421 instead, there should be some programmatic way of inquiring as
12422 to whether or not an object is a vtable. */
12424 #define VTABLE_NAME_P(NAME) \
12425 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12426 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12427 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12428 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12430 void
12431 rs6000_output_symbol_ref (file, x)
12432 FILE *file;
12433 rtx x;
12435 /* Currently C++ toc references to vtables can be emitted before it
12436 is decided whether the vtable is public or private. If this is
12437 the case, then the linker will eventually complain that there is
12438 a reference to an unknown section. Thus, for vtables only,
12439 we emit the TOC reference to reference the symbol and not the
12440 section. */
12441 const char *name = XSTR (x, 0);
12443 if (VTABLE_NAME_P (name))
12445 RS6000_OUTPUT_BASENAME (file, name);
12447 else
12448 assemble_name (file, name);
12451 /* Output a TOC entry. We derive the entry name from what is being
12452 written. */
12454 void
12455 output_toc (file, x, labelno, mode)
12456 FILE *file;
12457 rtx x;
12458 int labelno;
12459 enum machine_mode mode;
12461 char buf[256];
12462 const char *name = buf;
12463 const char *real_name;
12464 rtx base = x;
12465 int offset = 0;
12467 if (TARGET_NO_TOC)
12468 abort ();
12470 /* When the linker won't eliminate them, don't output duplicate
12471 TOC entries (this happens on AIX if there is any kind of TOC,
12472 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12473 CODE_LABELs. */
12474 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12476 struct toc_hash_struct *h;
12477 void * * found;
12479 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12480 time because GGC is not initialised at that point. */
12481 if (toc_hash_table == NULL)
12482 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12483 toc_hash_eq, NULL);
12485 h = ggc_alloc (sizeof (*h));
12486 h->key = x;
12487 h->key_mode = mode;
12488 h->labelno = labelno;
12490 found = htab_find_slot (toc_hash_table, h, 1);
12491 if (*found == NULL)
12492 *found = h;
12493 else /* This is indeed a duplicate.
12494 Set this label equal to that label. */
12496 fputs ("\t.set ", file);
12497 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12498 fprintf (file, "%d,", labelno);
12499 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12500 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12501 found)->labelno));
12502 return;
12506 /* If we're going to put a double constant in the TOC, make sure it's
12507 aligned properly when strict alignment is on. */
12508 if (GET_CODE (x) == CONST_DOUBLE
12509 && STRICT_ALIGNMENT
12510 && GET_MODE_BITSIZE (mode) >= 64
12511 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12512 ASM_OUTPUT_ALIGN (file, 3);
12515 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12517 /* Handle FP constants specially. Note that if we have a minimal
12518 TOC, things we put here aren't actually in the TOC, so we can allow
12519 FP constants. */
12520 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12522 REAL_VALUE_TYPE rv;
12523 long k[4];
12525 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12526 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12528 if (TARGET_64BIT)
12530 if (TARGET_MINIMAL_TOC)
12531 fputs (DOUBLE_INT_ASM_OP, file);
12532 else
12533 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12534 k[0] & 0xffffffff, k[1] & 0xffffffff,
12535 k[2] & 0xffffffff, k[3] & 0xffffffff);
12536 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12537 k[0] & 0xffffffff, k[1] & 0xffffffff,
12538 k[2] & 0xffffffff, k[3] & 0xffffffff);
12539 return;
12541 else
12543 if (TARGET_MINIMAL_TOC)
12544 fputs ("\t.long ", file);
12545 else
12546 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12547 k[0] & 0xffffffff, k[1] & 0xffffffff,
12548 k[2] & 0xffffffff, k[3] & 0xffffffff);
12549 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12550 k[0] & 0xffffffff, k[1] & 0xffffffff,
12551 k[2] & 0xffffffff, k[3] & 0xffffffff);
12552 return;
12555 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12557 REAL_VALUE_TYPE rv;
12558 long k[2];
12560 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12561 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12563 if (TARGET_64BIT)
12565 if (TARGET_MINIMAL_TOC)
12566 fputs (DOUBLE_INT_ASM_OP, file);
12567 else
12568 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12569 k[0] & 0xffffffff, k[1] & 0xffffffff);
12570 fprintf (file, "0x%lx%08lx\n",
12571 k[0] & 0xffffffff, k[1] & 0xffffffff);
12572 return;
12574 else
12576 if (TARGET_MINIMAL_TOC)
12577 fputs ("\t.long ", file);
12578 else
12579 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12580 k[0] & 0xffffffff, k[1] & 0xffffffff);
12581 fprintf (file, "0x%lx,0x%lx\n",
12582 k[0] & 0xffffffff, k[1] & 0xffffffff);
12583 return;
12586 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12588 REAL_VALUE_TYPE rv;
12589 long l;
12591 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12592 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12594 if (TARGET_64BIT)
12596 if (TARGET_MINIMAL_TOC)
12597 fputs (DOUBLE_INT_ASM_OP, file);
12598 else
12599 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12600 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12601 return;
12603 else
12605 if (TARGET_MINIMAL_TOC)
12606 fputs ("\t.long ", file);
12607 else
12608 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12609 fprintf (file, "0x%lx\n", l & 0xffffffff);
12610 return;
12613 else if (GET_MODE (x) == VOIDmode
12614 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12616 unsigned HOST_WIDE_INT low;
12617 HOST_WIDE_INT high;
12619 if (GET_CODE (x) == CONST_DOUBLE)
12621 low = CONST_DOUBLE_LOW (x);
12622 high = CONST_DOUBLE_HIGH (x);
12624 else
12625 #if HOST_BITS_PER_WIDE_INT == 32
12627 low = INTVAL (x);
12628 high = (low & 0x80000000) ? ~0 : 0;
12630 #else
12632 low = INTVAL (x) & 0xffffffff;
12633 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12635 #endif
12637 /* TOC entries are always Pmode-sized, but since this
12638 is a bigendian machine then if we're putting smaller
12639 integer constants in the TOC we have to pad them.
12640 (This is still a win over putting the constants in
12641 a separate constant pool, because then we'd have
12642 to have both a TOC entry _and_ the actual constant.)
12644 For a 32-bit target, CONST_INT values are loaded and shifted
12645 entirely within `low' and can be stored in one TOC entry. */
12647 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12648 abort ();/* It would be easy to make this work, but it doesn't now. */
12650 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12652 #if HOST_BITS_PER_WIDE_INT == 32
12653 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12654 POINTER_SIZE, &low, &high, 0);
12655 #else
12656 low |= high << 32;
12657 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12658 high = (HOST_WIDE_INT) low >> 32;
12659 low &= 0xffffffff;
12660 #endif
12663 if (TARGET_64BIT)
12665 if (TARGET_MINIMAL_TOC)
12666 fputs (DOUBLE_INT_ASM_OP, file);
12667 else
12668 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12669 (long) high & 0xffffffff, (long) low & 0xffffffff);
12670 fprintf (file, "0x%lx%08lx\n",
12671 (long) high & 0xffffffff, (long) low & 0xffffffff);
12672 return;
12674 else
12676 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12678 if (TARGET_MINIMAL_TOC)
12679 fputs ("\t.long ", file);
12680 else
12681 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12682 (long) high & 0xffffffff, (long) low & 0xffffffff);
12683 fprintf (file, "0x%lx,0x%lx\n",
12684 (long) high & 0xffffffff, (long) low & 0xffffffff);
12686 else
12688 if (TARGET_MINIMAL_TOC)
12689 fputs ("\t.long ", file);
12690 else
12691 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12692 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12694 return;
12698 if (GET_CODE (x) == CONST)
12700 if (GET_CODE (XEXP (x, 0)) != PLUS)
12701 abort ();
12703 base = XEXP (XEXP (x, 0), 0);
12704 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12707 if (GET_CODE (base) == SYMBOL_REF)
12708 name = XSTR (base, 0);
12709 else if (GET_CODE (base) == LABEL_REF)
12710 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12711 else if (GET_CODE (base) == CODE_LABEL)
12712 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12713 else
12714 abort ();
12716 real_name = (*targetm.strip_name_encoding) (name);
12717 if (TARGET_MINIMAL_TOC)
12718 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12719 else
12721 fprintf (file, "\t.tc %s", real_name);
12723 if (offset < 0)
12724 fprintf (file, ".N%d", - offset);
12725 else if (offset)
12726 fprintf (file, ".P%d", offset);
12728 fputs ("[TC],", file);
12731 /* Currently C++ toc references to vtables can be emitted before it
12732 is decided whether the vtable is public or private. If this is
12733 the case, then the linker will eventually complain that there is
12734 a TOC reference to an unknown section. Thus, for vtables only,
12735 we emit the TOC reference to reference the symbol and not the
12736 section. */
12737 if (VTABLE_NAME_P (name))
12739 RS6000_OUTPUT_BASENAME (file, name);
12740 if (offset < 0)
12741 fprintf (file, "%d", offset);
12742 else if (offset > 0)
12743 fprintf (file, "+%d", offset);
12745 else
12746 output_addr_const (file, x);
12747 putc ('\n', file);
12750 /* Output an assembler pseudo-op to write an ASCII string of N characters
12751 starting at P to FILE.
12753 On the RS/6000, we have to do this using the .byte operation and
12754 write out special characters outside the quoted string.
12755 Also, the assembler is broken; very long strings are truncated,
12756 so we must artificially break them up early. */
12758 void
12759 output_ascii (file, p, n)
12760 FILE *file;
12761 const char *p;
12762 int n;
12764 char c;
12765 int i, count_string;
12766 const char *for_string = "\t.byte \"";
12767 const char *for_decimal = "\t.byte ";
12768 const char *to_close = NULL;
12770 count_string = 0;
12771 for (i = 0; i < n; i++)
12773 c = *p++;
12774 if (c >= ' ' && c < 0177)
12776 if (for_string)
12777 fputs (for_string, file);
12778 putc (c, file);
12780 /* Write two quotes to get one. */
12781 if (c == '"')
12783 putc (c, file);
12784 ++count_string;
12787 for_string = NULL;
12788 for_decimal = "\"\n\t.byte ";
12789 to_close = "\"\n";
12790 ++count_string;
12792 if (count_string >= 512)
12794 fputs (to_close, file);
12796 for_string = "\t.byte \"";
12797 for_decimal = "\t.byte ";
12798 to_close = NULL;
12799 count_string = 0;
12802 else
12804 if (for_decimal)
12805 fputs (for_decimal, file);
12806 fprintf (file, "%d", c);
12808 for_string = "\n\t.byte \"";
12809 for_decimal = ", ";
12810 to_close = "\n";
12811 count_string = 0;
12815 /* Now close the string if we have written one. Then end the line. */
12816 if (to_close)
12817 fputs (to_close, file);
12820 /* Generate a unique section name for FILENAME for a section type
12821 represented by SECTION_DESC. Output goes into BUF.
12823 SECTION_DESC can be any string, as long as it is different for each
12824 possible section type.
12826 We name the section in the same manner as xlc. The name begins with an
12827 underscore followed by the filename (after stripping any leading directory
12828 names) with the last period replaced by the string SECTION_DESC. If
12829 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12830 the name. */
12832 void
12833 rs6000_gen_section_name (buf, filename, section_desc)
12834 char **buf;
12835 const char *filename;
12836 const char *section_desc;
12838 const char *q, *after_last_slash, *last_period = 0;
12839 char *p;
12840 int len;
12842 after_last_slash = filename;
12843 for (q = filename; *q; q++)
12845 if (*q == '/')
12846 after_last_slash = q + 1;
12847 else if (*q == '.')
12848 last_period = q;
12851 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12852 *buf = (char *) xmalloc (len);
12854 p = *buf;
12855 *p++ = '_';
12857 for (q = after_last_slash; *q; q++)
12859 if (q == last_period)
12861 strcpy (p, section_desc);
12862 p += strlen (section_desc);
12863 break;
12866 else if (ISALNUM (*q))
12867 *p++ = *q;
12870 if (last_period == 0)
12871 strcpy (p, section_desc);
12872 else
12873 *p = '\0';
12876 /* Emit profile function. */
12878 void
12879 output_profile_hook (labelno)
12880 int labelno ATTRIBUTE_UNUSED;
12882 if (TARGET_PROFILE_KERNEL)
12883 return;
12885 if (DEFAULT_ABI == ABI_AIX)
12887 #ifdef NO_PROFILE_COUNTERS
12888 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12889 #else
12890 char buf[30];
12891 const char *label_name;
12892 rtx fun;
12894 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12895 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12896 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12898 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12899 fun, Pmode);
12900 #endif
12902 else if (DEFAULT_ABI == ABI_DARWIN)
12904 const char *mcount_name = RS6000_MCOUNT;
12905 int caller_addr_regno = LINK_REGISTER_REGNUM;
12907 /* Be conservative and always set this, at least for now. */
12908 current_function_uses_pic_offset_table = 1;
12910 #if TARGET_MACHO
12911 /* For PIC code, set up a stub and collect the caller's address
12912 from r0, which is where the prologue puts it. */
12913 if (MACHOPIC_INDIRECT)
12915 mcount_name = machopic_stub_name (mcount_name);
12916 if (current_function_uses_pic_offset_table)
12917 caller_addr_regno = 0;
12919 #endif
12920 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12921 0, VOIDmode, 1,
12922 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12926 /* Write function profiler code. */
12928 void
12929 output_function_profiler (file, labelno)
12930 FILE *file;
12931 int labelno;
12933 char buf[100];
12934 int save_lr = 8;
12936 switch (DEFAULT_ABI)
12938 default:
12939 abort ();
12941 case ABI_V4:
12942 save_lr = 4;
12943 if (!TARGET_32BIT)
12945 warning ("no profiling of 64-bit code for this ABI");
12946 return;
12948 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12949 fprintf (file, "\tmflr %s\n", reg_names[0]);
12950 if (flag_pic == 1)
12952 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12953 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12954 reg_names[0], save_lr, reg_names[1]);
12955 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12956 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12957 assemble_name (file, buf);
12958 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12960 else if (flag_pic > 1)
12962 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12963 reg_names[0], save_lr, reg_names[1]);
12964 /* Now, we need to get the address of the label. */
12965 fputs ("\tbl 1f\n\t.long ", file);
12966 assemble_name (file, buf);
12967 fputs ("-.\n1:", file);
12968 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12969 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12970 reg_names[0], reg_names[11]);
12971 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12972 reg_names[0], reg_names[0], reg_names[11]);
12974 else
12976 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12977 assemble_name (file, buf);
12978 fputs ("@ha\n", file);
12979 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12980 reg_names[0], save_lr, reg_names[1]);
12981 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12982 assemble_name (file, buf);
12983 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12986 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12987 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12988 break;
12990 case ABI_AIX:
12991 case ABI_DARWIN:
12992 if (!TARGET_PROFILE_KERNEL)
12994 /* Don't do anything, done in output_profile_hook (). */
12996 else
12998 if (TARGET_32BIT)
12999 abort ();
13001 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13002 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13004 if (current_function_needs_context)
13006 asm_fprintf (file, "\tstd %s,24(%s)\n",
13007 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13008 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13009 asm_fprintf (file, "\tld %s,24(%s)\n",
13010 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13012 else
13013 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13015 break;
13020 static int
13021 rs6000_use_dfa_pipeline_interface ()
13023 return 1;
13026 /* Power4 load update and store update instructions are cracked into a
13027 load or store and an integer insn which are executed in the same cycle.
13028 Branches have their own dispatch slot which does not count against the
13029 GCC issue rate, but it changes the program flow so there are no other
13030 instructions to issue in this cycle. */
13032 static int
13033 rs6000_variable_issue (stream, verbose, insn, more)
13034 FILE *stream ATTRIBUTE_UNUSED;
13035 int verbose ATTRIBUTE_UNUSED;
13036 rtx insn;
13037 int more;
13039 if (GET_CODE (PATTERN (insn)) == USE
13040 || GET_CODE (PATTERN (insn)) == CLOBBER)
13041 return more;
13043 if (rs6000_cpu == PROCESSOR_POWER4)
13045 enum attr_type type = get_attr_type (insn);
13046 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13047 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
13048 return 0;
13049 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13050 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13051 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13052 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13053 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13054 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13055 || type == TYPE_IDIV || type == TYPE_LDIV)
13056 return more > 2 ? more - 2 : 0;
13059 return more - 1;
13062 /* Adjust the cost of a scheduling dependency. Return the new cost of
13063 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13065 static int
13066 rs6000_adjust_cost (insn, link, dep_insn, cost)
13067 rtx insn;
13068 rtx link;
13069 rtx dep_insn ATTRIBUTE_UNUSED;
13070 int cost;
13072 if (! recog_memoized (insn))
13073 return 0;
13075 if (REG_NOTE_KIND (link) != 0)
13076 return 0;
13078 if (REG_NOTE_KIND (link) == 0)
13080 /* Data dependency; DEP_INSN writes a register that INSN reads
13081 some cycles later. */
13082 switch (get_attr_type (insn))
13084 case TYPE_JMPREG:
13085 /* Tell the first scheduling pass about the latency between
13086 a mtctr and bctr (and mtlr and br/blr). The first
13087 scheduling pass will not know about this latency since
13088 the mtctr instruction, which has the latency associated
13089 to it, will be generated by reload. */
13090 return TARGET_POWER ? 5 : 4;
13091 case TYPE_BRANCH:
13092 /* Leave some extra cycles between a compare and its
13093 dependent branch, to inhibit expensive mispredicts. */
13094 if ((rs6000_cpu_attr == CPU_PPC603
13095 || rs6000_cpu_attr == CPU_PPC604
13096 || rs6000_cpu_attr == CPU_PPC604E
13097 || rs6000_cpu_attr == CPU_PPC620
13098 || rs6000_cpu_attr == CPU_PPC630
13099 || rs6000_cpu_attr == CPU_PPC750
13100 || rs6000_cpu_attr == CPU_PPC7400
13101 || rs6000_cpu_attr == CPU_PPC7450
13102 || rs6000_cpu_attr == CPU_POWER4)
13103 && recog_memoized (dep_insn)
13104 && (INSN_CODE (dep_insn) >= 0)
13105 && (get_attr_type (dep_insn) == TYPE_CMP
13106 || get_attr_type (dep_insn) == TYPE_COMPARE
13107 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13108 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13109 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13110 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13111 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13112 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13113 return cost + 2;
13114 default:
13115 break;
13117 /* Fall out to return default cost. */
13120 return cost;
13123 /* A C statement (sans semicolon) to update the integer scheduling
13124 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
13125 INSN earlier, increase the priority to execute INSN later. Do not
13126 define this macro if you do not need to adjust the scheduling
13127 priorities of insns. */
13129 static int
13130 rs6000_adjust_priority (insn, priority)
13131 rtx insn ATTRIBUTE_UNUSED;
13132 int priority;
13134 /* On machines (like the 750) which have asymmetric integer units,
13135 where one integer unit can do multiply and divides and the other
13136 can't, reduce the priority of multiply/divide so it is scheduled
13137 before other integer operations. */
13139 #if 0
13140 if (! INSN_P (insn))
13141 return priority;
13143 if (GET_CODE (PATTERN (insn)) == USE)
13144 return priority;
13146 switch (rs6000_cpu_attr) {
13147 case CPU_PPC750:
13148 switch (get_attr_type (insn))
13150 default:
13151 break;
13153 case TYPE_IMUL:
13154 case TYPE_IDIV:
13155 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13156 priority, priority);
13157 if (priority >= 0 && priority < 0x01000000)
13158 priority >>= 3;
13159 break;
13162 #endif
13164 return priority;
13167 /* Return how many instructions the machine can issue per cycle. */
13169 static int
13170 rs6000_issue_rate ()
13172 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13173 if (!reload_completed)
13174 return 1;
13176 switch (rs6000_cpu_attr) {
13177 case CPU_RIOS1: /* ? */
13178 case CPU_RS64A:
13179 case CPU_PPC601: /* ? */
13180 case CPU_PPC7450:
13181 return 3;
13182 case CPU_PPC440:
13183 case CPU_PPC603:
13184 case CPU_PPC750:
13185 case CPU_PPC7400:
13186 case CPU_PPC8540:
13187 return 2;
13188 case CPU_RIOS2:
13189 case CPU_PPC604:
13190 case CPU_PPC604E:
13191 case CPU_PPC620:
13192 case CPU_PPC630:
13193 case CPU_POWER4:
13194 return 4;
13195 default:
13196 return 1;
13200 /* Return how many instructions to look ahead for better insn
13201 scheduling. */
13203 static int
13204 rs6000_use_sched_lookahead ()
13206 if (rs6000_cpu_attr == CPU_PPC8540)
13207 return 4;
13208 return 0;
13212 /* Length in units of the trampoline for entering a nested function. */
13215 rs6000_trampoline_size ()
13217 int ret = 0;
13219 switch (DEFAULT_ABI)
13221 default:
13222 abort ();
13224 case ABI_AIX:
13225 ret = (TARGET_32BIT) ? 12 : 24;
13226 break;
13228 case ABI_DARWIN:
13229 case ABI_V4:
13230 ret = (TARGET_32BIT) ? 40 : 48;
13231 break;
13234 return ret;
13237 /* Emit RTL insns to initialize the variable parts of a trampoline.
13238 FNADDR is an RTX for the address of the function's pure code.
13239 CXT is an RTX for the static chain value for the function. */
13241 void
13242 rs6000_initialize_trampoline (addr, fnaddr, cxt)
13243 rtx addr;
13244 rtx fnaddr;
13245 rtx cxt;
13247 enum machine_mode pmode = Pmode;
13248 int regsize = (TARGET_32BIT) ? 4 : 8;
13249 rtx ctx_reg = force_reg (pmode, cxt);
13251 switch (DEFAULT_ABI)
13253 default:
13254 abort ();
13256 /* Macros to shorten the code expansions below. */
13257 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
13258 #define MEM_PLUS(addr,offset) \
13259 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
13261 /* Under AIX, just build the 3 word function descriptor */
13262 case ABI_AIX:
13264 rtx fn_reg = gen_reg_rtx (pmode);
13265 rtx toc_reg = gen_reg_rtx (pmode);
13266 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
13267 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
13268 emit_move_insn (MEM_DEREF (addr), fn_reg);
13269 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
13270 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
13272 break;
13274 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
13275 case ABI_DARWIN:
13276 case ABI_V4:
13277 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
13278 FALSE, VOIDmode, 4,
13279 addr, pmode,
13280 GEN_INT (rs6000_trampoline_size ()), SImode,
13281 fnaddr, pmode,
13282 ctx_reg, pmode);
13283 break;
13286 return;
13290 /* Table of valid machine attributes. */
13292 const struct attribute_spec rs6000_attribute_table[] =
13294 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13295 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13296 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13297 { NULL, 0, 0, false, false, false, NULL }
13300 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13301 struct attribute_spec.handler. */
13303 static tree
13304 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
13305 tree *node;
13306 tree name;
13307 tree args ATTRIBUTE_UNUSED;
13308 int flags ATTRIBUTE_UNUSED;
13309 bool *no_add_attrs;
13311 if (TREE_CODE (*node) != FUNCTION_TYPE
13312 && TREE_CODE (*node) != FIELD_DECL
13313 && TREE_CODE (*node) != TYPE_DECL)
13315 warning ("`%s' attribute only applies to functions",
13316 IDENTIFIER_POINTER (name));
13317 *no_add_attrs = true;
13320 return NULL_TREE;
13323 /* Set longcall attributes on all functions declared when
13324 rs6000_default_long_calls is true. */
13325 static void
13326 rs6000_set_default_type_attributes (type)
13327 tree type;
13329 if (rs6000_default_long_calls
13330 && (TREE_CODE (type) == FUNCTION_TYPE
13331 || TREE_CODE (type) == METHOD_TYPE))
13332 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13333 NULL_TREE,
13334 TYPE_ATTRIBUTES (type));
13337 /* Return a reference suitable for calling a function with the
13338 longcall attribute. */
13340 struct rtx_def *
13341 rs6000_longcall_ref (call_ref)
13342 rtx call_ref;
13344 const char *call_name;
13345 tree node;
13347 if (GET_CODE (call_ref) != SYMBOL_REF)
13348 return call_ref;
13350 /* System V adds '.' to the internal name, so skip them. */
13351 call_name = XSTR (call_ref, 0);
13352 if (*call_name == '.')
13354 while (*call_name == '.')
13355 call_name++;
13357 node = get_identifier (call_name);
13358 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13361 return force_reg (Pmode, call_ref);
13364 #ifdef USING_ELFOS_H
13366 /* A C statement or statements to switch to the appropriate section
13367 for output of RTX in mode MODE. You can assume that RTX is some
13368 kind of constant in RTL. The argument MODE is redundant except in
13369 the case of a `const_int' rtx. Select the section by calling
13370 `text_section' or one of the alternatives for other sections.
13372 Do not define this macro if you put all constants in the read-only
13373 data section. */
13375 static void
13376 rs6000_elf_select_rtx_section (mode, x, align)
13377 enum machine_mode mode;
13378 rtx x;
13379 unsigned HOST_WIDE_INT align;
13381 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13382 toc_section ();
13383 else
13384 default_elf_select_rtx_section (mode, x, align);
13387 /* A C statement or statements to switch to the appropriate
13388 section for output of DECL. DECL is either a `VAR_DECL' node
13389 or a constant of some sort. RELOC indicates whether forming
13390 the initial value of DECL requires link-time relocations. */
13392 static void
13393 rs6000_elf_select_section (decl, reloc, align)
13394 tree decl;
13395 int reloc;
13396 unsigned HOST_WIDE_INT align;
13398 /* Pretend that we're always building for a shared library when
13399 ABI_AIX, because otherwise we end up with dynamic relocations
13400 in read-only sections. This happens for function pointers,
13401 references to vtables in typeinfo, and probably other cases. */
13402 default_elf_select_section_1 (decl, reloc, align,
13403 flag_pic || DEFAULT_ABI == ABI_AIX);
13406 /* A C statement to build up a unique section name, expressed as a
13407 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13408 RELOC indicates whether the initial value of EXP requires
13409 link-time relocations. If you do not define this macro, GCC will use
13410 the symbol name prefixed by `.' as the section name. Note - this
13411 macro can now be called for uninitialized data items as well as
13412 initialized data and functions. */
13414 static void
13415 rs6000_elf_unique_section (decl, reloc)
13416 tree decl;
13417 int reloc;
13419 /* As above, pretend that we're always building for a shared library
13420 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13421 default_unique_section_1 (decl, reloc,
13422 flag_pic || DEFAULT_ABI == ABI_AIX);
13425 /* For a SYMBOL_REF, set generic flags and then perform some
13426 target-specific processing.
13428 When the AIX ABI is requested on a non-AIX system, replace the
13429 function name with the real name (with a leading .) rather than the
13430 function descriptor name. This saves a lot of overriding code to
13431 read the prefixes. */
13433 static void
13434 rs6000_elf_encode_section_info (decl, rtl, first)
13435 tree decl;
13436 rtx rtl;
13437 int first;
13439 default_encode_section_info (decl, rtl, first);
13441 if (first
13442 && TREE_CODE (decl) == FUNCTION_DECL
13443 && !TARGET_AIX
13444 && DEFAULT_ABI == ABI_AIX)
13446 rtx sym_ref = XEXP (rtl, 0);
13447 size_t len = strlen (XSTR (sym_ref, 0));
13448 char *str = alloca (len + 2);
13449 str[0] = '.';
13450 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13451 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13455 static bool
13456 rs6000_elf_in_small_data_p (decl)
13457 tree decl;
13459 if (rs6000_sdata == SDATA_NONE)
13460 return false;
13462 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13464 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13465 if (strcmp (section, ".sdata") == 0
13466 || strcmp (section, ".sdata2") == 0
13467 || strcmp (section, ".sbss") == 0
13468 || strcmp (section, ".sbss2") == 0
13469 || strcmp (section, ".PPC.EMB.sdata0") == 0
13470 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13471 return true;
13473 else
13475 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13477 if (size > 0
13478 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13479 /* If it's not public, and we're not going to reference it there,
13480 there's no need to put it in the small data section. */
13481 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13482 return true;
13485 return false;
13488 #endif /* USING_ELFOS_H */
13491 /* Return a REG that occurs in ADDR with coefficient 1.
13492 ADDR can be effectively incremented by incrementing REG.
13494 r0 is special and we must not select it as an address
13495 register by this routine since our caller will try to
13496 increment the returned register via an "la" instruction. */
13498 struct rtx_def *
13499 find_addr_reg (addr)
13500 rtx addr;
13502 while (GET_CODE (addr) == PLUS)
13504 if (GET_CODE (XEXP (addr, 0)) == REG
13505 && REGNO (XEXP (addr, 0)) != 0)
13506 addr = XEXP (addr, 0);
13507 else if (GET_CODE (XEXP (addr, 1)) == REG
13508 && REGNO (XEXP (addr, 1)) != 0)
13509 addr = XEXP (addr, 1);
13510 else if (CONSTANT_P (XEXP (addr, 0)))
13511 addr = XEXP (addr, 1);
13512 else if (CONSTANT_P (XEXP (addr, 1)))
13513 addr = XEXP (addr, 0);
13514 else
13515 abort ();
13517 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13518 return addr;
13519 abort ();
13522 void
13523 rs6000_fatal_bad_address (op)
13524 rtx op;
13526 fatal_insn ("bad address", op);
13529 #if TARGET_MACHO
13531 #if 0
13532 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13533 reference and a constant. */
13536 symbolic_operand (op)
13537 rtx op;
13539 switch (GET_CODE (op))
13541 case SYMBOL_REF:
13542 case LABEL_REF:
13543 return 1;
13544 case CONST:
13545 op = XEXP (op, 0);
13546 return (GET_CODE (op) == SYMBOL_REF ||
13547 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13548 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13549 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13550 default:
13551 return 0;
13554 #endif
13556 #ifdef RS6000_LONG_BRANCH
13558 static tree stub_list = 0;
13560 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13561 procedure calls to the linked list. */
13563 void
13564 add_compiler_stub (label_name, function_name, line_number)
13565 tree label_name;
13566 tree function_name;
13567 int line_number;
13569 tree stub = build_tree_list (function_name, label_name);
13570 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13571 TREE_CHAIN (stub) = stub_list;
13572 stub_list = stub;
13575 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13576 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13577 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13579 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13580 handling procedure calls from the linked list and initializes the
13581 linked list. */
13583 void
13584 output_compiler_stub ()
13586 char tmp_buf[256];
13587 char label_buf[256];
13588 tree stub;
13590 if (!flag_pic)
13591 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13593 fprintf (asm_out_file,
13594 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13596 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13597 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13598 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13599 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13601 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13602 strcpy (label_buf,
13603 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13604 else
13606 label_buf[0] = '_';
13607 strcpy (label_buf+1,
13608 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13611 strcpy (tmp_buf, "lis r12,hi16(");
13612 strcat (tmp_buf, label_buf);
13613 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13614 strcat (tmp_buf, label_buf);
13615 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13616 output_asm_insn (tmp_buf, 0);
13618 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13619 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13620 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13621 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13624 stub_list = 0;
13627 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13628 already there or not. */
13631 no_previous_def (function_name)
13632 tree function_name;
13634 tree stub;
13635 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13636 if (function_name == STUB_FUNCTION_NAME (stub))
13637 return 0;
13638 return 1;
13641 /* GET_PREV_LABEL gets the label name from the previous definition of
13642 the function. */
13644 tree
13645 get_prev_label (function_name)
13646 tree function_name;
13648 tree stub;
13649 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13650 if (function_name == STUB_FUNCTION_NAME (stub))
13651 return STUB_LABEL_NAME (stub);
13652 return 0;
13655 /* INSN is either a function call or a millicode call. It may have an
13656 unconditional jump in its delay slot.
13658 CALL_DEST is the routine we are calling. */
13660 char *
13661 output_call (insn, call_dest, operand_number)
13662 rtx insn;
13663 rtx call_dest;
13664 int operand_number;
13666 static char buf[256];
13667 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13669 tree labelname;
13670 tree funname = get_identifier (XSTR (call_dest, 0));
13672 if (no_previous_def (funname))
13674 int line_number = 0;
13675 rtx label_rtx = gen_label_rtx ();
13676 char *label_buf, temp_buf[256];
13677 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13678 CODE_LABEL_NUMBER (label_rtx));
13679 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13680 labelname = get_identifier (label_buf);
13681 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13682 if (insn)
13683 line_number = NOTE_LINE_NUMBER (insn);
13684 add_compiler_stub (labelname, funname, line_number);
13686 else
13687 labelname = get_prev_label (funname);
13689 sprintf (buf, "jbsr %%z%d,%.246s",
13690 operand_number, IDENTIFIER_POINTER (labelname));
13691 return buf;
13693 else
13695 sprintf (buf, "bl %%z%d", operand_number);
13696 return buf;
13700 #endif /* RS6000_LONG_BRANCH */
13702 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13703 do { \
13704 const char *const symbol_ = (SYMBOL); \
13705 char *buffer_ = (BUF); \
13706 if (symbol_[0] == '"') \
13708 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13710 else if (name_needs_quotes(symbol_)) \
13712 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13714 else \
13716 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13718 } while (0)
13721 /* Generate PIC and indirect symbol stubs. */
13723 void
13724 machopic_output_stub (file, symb, stub)
13725 FILE *file;
13726 const char *symb, *stub;
13728 unsigned int length;
13729 char *symbol_name, *lazy_ptr_name;
13730 char *local_label_0;
13731 static int label = 0;
13733 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13734 symb = (*targetm.strip_name_encoding) (symb);
13736 label += 1;
13738 length = strlen (symb);
13739 symbol_name = alloca (length + 32);
13740 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13742 lazy_ptr_name = alloca (length + 32);
13743 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13745 local_label_0 = alloca (length + 32);
13746 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13748 if (flag_pic == 2)
13749 machopic_picsymbol_stub1_section ();
13750 else
13751 machopic_symbol_stub1_section ();
13752 fprintf (file, "\t.align 2\n");
13754 fprintf (file, "%s:\n", stub);
13755 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13757 if (flag_pic == 2)
13759 fprintf (file, "\tmflr r0\n");
13760 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13761 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13762 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13763 lazy_ptr_name, local_label_0);
13764 fprintf (file, "\tmtlr r0\n");
13765 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13766 lazy_ptr_name, local_label_0);
13767 fprintf (file, "\tmtctr r12\n");
13768 fprintf (file, "\tbctr\n");
13770 else
13772 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13773 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13774 fprintf (file, "\tmtctr r12\n");
13775 fprintf (file, "\tbctr\n");
13778 machopic_lazy_symbol_ptr_section ();
13779 fprintf (file, "%s:\n", lazy_ptr_name);
13780 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13781 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13784 /* Legitimize PIC addresses. If the address is already
13785 position-independent, we return ORIG. Newly generated
13786 position-independent addresses go into a reg. This is REG if non
13787 zero, otherwise we allocate register(s) as necessary. */
13789 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13792 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13793 rtx orig;
13794 enum machine_mode mode;
13795 rtx reg;
13797 rtx base, offset;
13799 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13800 reg = gen_reg_rtx (Pmode);
13802 if (GET_CODE (orig) == CONST)
13804 if (GET_CODE (XEXP (orig, 0)) == PLUS
13805 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13806 return orig;
13808 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13810 base =
13811 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13812 Pmode, reg);
13813 offset =
13814 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13815 Pmode, reg);
13817 else
13818 abort ();
13820 if (GET_CODE (offset) == CONST_INT)
13822 if (SMALL_INT (offset))
13823 return plus_constant (base, INTVAL (offset));
13824 else if (! reload_in_progress && ! reload_completed)
13825 offset = force_reg (Pmode, offset);
13826 else
13828 rtx mem = force_const_mem (Pmode, orig);
13829 return machopic_legitimize_pic_address (mem, Pmode, reg);
13832 return gen_rtx (PLUS, Pmode, base, offset);
13835 /* Fall back on generic machopic code. */
13836 return machopic_legitimize_pic_address (orig, mode, reg);
13839 /* This is just a placeholder to make linking work without having to
13840 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13841 ever needed for Darwin (not too likely!) this would have to get a
13842 real definition. */
13844 void
13845 toc_section ()
13849 #endif /* TARGET_MACHO */
13851 #if TARGET_ELF
13852 static unsigned int
13853 rs6000_elf_section_type_flags (decl, name, reloc)
13854 tree decl;
13855 const char *name;
13856 int reloc;
13858 unsigned int flags
13859 = default_section_type_flags_1 (decl, name, reloc,
13860 flag_pic || DEFAULT_ABI == ABI_AIX);
13862 if (TARGET_RELOCATABLE)
13863 flags |= SECTION_WRITE;
13865 return flags;
13868 /* Record an element in the table of global constructors. SYMBOL is
13869 a SYMBOL_REF of the function to be called; PRIORITY is a number
13870 between 0 and MAX_INIT_PRIORITY.
13872 This differs from default_named_section_asm_out_constructor in
13873 that we have special handling for -mrelocatable. */
13875 static void
13876 rs6000_elf_asm_out_constructor (symbol, priority)
13877 rtx symbol;
13878 int priority;
13880 const char *section = ".ctors";
13881 char buf[16];
13883 if (priority != DEFAULT_INIT_PRIORITY)
13885 sprintf (buf, ".ctors.%.5u",
13886 /* Invert the numbering so the linker puts us in the proper
13887 order; constructors are run from right to left, and the
13888 linker sorts in increasing order. */
13889 MAX_INIT_PRIORITY - priority);
13890 section = buf;
13893 named_section_flags (section, SECTION_WRITE);
13894 assemble_align (POINTER_SIZE);
13896 if (TARGET_RELOCATABLE)
13898 fputs ("\t.long (", asm_out_file);
13899 output_addr_const (asm_out_file, symbol);
13900 fputs (")@fixup\n", asm_out_file);
13902 else
13903 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13906 static void
13907 rs6000_elf_asm_out_destructor (symbol, priority)
13908 rtx symbol;
13909 int priority;
13911 const char *section = ".dtors";
13912 char buf[16];
13914 if (priority != DEFAULT_INIT_PRIORITY)
13916 sprintf (buf, ".dtors.%.5u",
13917 /* Invert the numbering so the linker puts us in the proper
13918 order; constructors are run from right to left, and the
13919 linker sorts in increasing order. */
13920 MAX_INIT_PRIORITY - priority);
13921 section = buf;
13924 named_section_flags (section, SECTION_WRITE);
13925 assemble_align (POINTER_SIZE);
13927 if (TARGET_RELOCATABLE)
13929 fputs ("\t.long (", asm_out_file);
13930 output_addr_const (asm_out_file, symbol);
13931 fputs (")@fixup\n", asm_out_file);
13933 else
13934 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13936 #endif
13938 #if TARGET_XCOFF
13939 static void
13940 rs6000_xcoff_asm_globalize_label (stream, name)
13941 FILE *stream;
13942 const char *name;
13944 fputs (GLOBAL_ASM_OP, stream);
13945 RS6000_OUTPUT_BASENAME (stream, name);
13946 putc ('\n', stream);
13949 static void
13950 rs6000_xcoff_asm_named_section (name, flags)
13951 const char *name;
13952 unsigned int flags;
13954 int smclass;
13955 static const char * const suffix[3] = { "PR", "RO", "RW" };
13957 if (flags & SECTION_CODE)
13958 smclass = 0;
13959 else if (flags & SECTION_WRITE)
13960 smclass = 2;
13961 else
13962 smclass = 1;
13964 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13965 (flags & SECTION_CODE) ? "." : "",
13966 name, suffix[smclass], flags & SECTION_ENTSIZE);
13969 static void
13970 rs6000_xcoff_select_section (decl, reloc, align)
13971 tree decl;
13972 int reloc;
13973 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13975 if (decl_readonly_section_1 (decl, reloc, 1))
13977 if (TREE_PUBLIC (decl))
13978 read_only_data_section ();
13979 else
13980 read_only_private_data_section ();
13982 else
13984 if (TREE_PUBLIC (decl))
13985 data_section ();
13986 else
13987 private_data_section ();
13991 static void
13992 rs6000_xcoff_unique_section (decl, reloc)
13993 tree decl;
13994 int reloc ATTRIBUTE_UNUSED;
13996 const char *name;
13998 /* Use select_section for private and uninitialized data. */
13999 if (!TREE_PUBLIC (decl)
14000 || DECL_COMMON (decl)
14001 || DECL_INITIAL (decl) == NULL_TREE
14002 || DECL_INITIAL (decl) == error_mark_node
14003 || (flag_zero_initialized_in_bss
14004 && initializer_zerop (DECL_INITIAL (decl))))
14005 return;
14007 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14008 name = (*targetm.strip_name_encoding) (name);
14009 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14012 /* Select section for constant in constant pool.
14014 On RS/6000, all constants are in the private read-only data area.
14015 However, if this is being placed in the TOC it must be output as a
14016 toc entry. */
14018 static void
14019 rs6000_xcoff_select_rtx_section (mode, x, align)
14020 enum machine_mode mode;
14021 rtx x;
14022 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14024 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14025 toc_section ();
14026 else
14027 read_only_private_data_section ();
14030 /* Remove any trailing [DS] or the like from the symbol name. */
14032 static const char *
14033 rs6000_xcoff_strip_name_encoding (name)
14034 const char *name;
14036 size_t len;
14037 if (*name == '*')
14038 name++;
14039 len = strlen (name);
14040 if (name[len - 1] == ']')
14041 return ggc_alloc_string (name, len - 4);
14042 else
14043 return name;
14046 /* Section attributes. AIX is always PIC. */
14048 static unsigned int
14049 rs6000_xcoff_section_type_flags (decl, name, reloc)
14050 tree decl;
14051 const char *name;
14052 int reloc;
14054 unsigned int align;
14055 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
14057 /* Align to at least UNIT size. */
14058 if (flags & SECTION_CODE)
14059 align = MIN_UNITS_PER_WORD;
14060 else
14061 /* Increase alignment of large objects if not already stricter. */
14062 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
14063 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
14064 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
14066 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
14068 #endif /* TARGET_XCOFF */
14070 #if TARGET_MACHO
14071 /* Cross-module name binding. Darwin does not support overriding
14072 functions at dynamic-link time. */
14074 static bool
14075 rs6000_binds_local_p (decl)
14076 tree decl;
14078 return default_binds_local_p_1 (decl, 0);
14080 #endif
14082 /* Compute a (partial) cost for rtx X. Return true if the complete
14083 cost has been computed, and false if subexpressions should be
14084 scanned. In either case, *TOTAL contains the cost result. */
14086 static bool
14087 rs6000_rtx_costs (x, code, outer_code, total)
14088 rtx x;
14089 int code, outer_code ATTRIBUTE_UNUSED;
14090 int *total;
14092 switch (code)
14094 /* On the RS/6000, if it is valid in the insn, it is free.
14095 So this always returns 0. */
14096 case CONST_INT:
14097 case CONST:
14098 case LABEL_REF:
14099 case SYMBOL_REF:
14100 case CONST_DOUBLE:
14101 case HIGH:
14102 *total = 0;
14103 return true;
14105 case PLUS:
14106 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14107 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14108 + 0x8000) >= 0x10000)
14109 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14110 ? COSTS_N_INSNS (2)
14111 : COSTS_N_INSNS (1));
14112 return true;
14114 case AND:
14115 case IOR:
14116 case XOR:
14117 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14118 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14119 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14120 ? COSTS_N_INSNS (2)
14121 : COSTS_N_INSNS (1));
14122 return true;
14124 case MULT:
14125 if (optimize_size)
14127 *total = COSTS_N_INSNS (2);
14128 return true;
14130 switch (rs6000_cpu)
14132 case PROCESSOR_RIOS1:
14133 case PROCESSOR_PPC405:
14134 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14135 ? COSTS_N_INSNS (5)
14136 : (INTVAL (XEXP (x, 1)) >= -256
14137 && INTVAL (XEXP (x, 1)) <= 255)
14138 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14139 return true;
14141 case PROCESSOR_PPC440:
14142 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14143 ? COSTS_N_INSNS (3)
14144 : COSTS_N_INSNS (2));
14145 return true;
14147 case PROCESSOR_RS64A:
14148 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14149 ? GET_MODE (XEXP (x, 1)) != DImode
14150 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14151 : (INTVAL (XEXP (x, 1)) >= -256
14152 && INTVAL (XEXP (x, 1)) <= 255)
14153 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14154 return true;
14156 case PROCESSOR_RIOS2:
14157 case PROCESSOR_MPCCORE:
14158 case PROCESSOR_PPC604e:
14159 *total = COSTS_N_INSNS (2);
14160 return true;
14162 case PROCESSOR_PPC601:
14163 *total = COSTS_N_INSNS (5);
14164 return true;
14166 case PROCESSOR_PPC603:
14167 case PROCESSOR_PPC7400:
14168 case PROCESSOR_PPC750:
14169 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14170 ? COSTS_N_INSNS (5)
14171 : (INTVAL (XEXP (x, 1)) >= -256
14172 && INTVAL (XEXP (x, 1)) <= 255)
14173 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14174 return true;
14176 case PROCESSOR_PPC7450:
14177 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14178 ? COSTS_N_INSNS (4)
14179 : COSTS_N_INSNS (3));
14180 return true;
14182 case PROCESSOR_PPC403:
14183 case PROCESSOR_PPC604:
14184 case PROCESSOR_PPC8540:
14185 *total = COSTS_N_INSNS (4);
14186 return true;
14188 case PROCESSOR_PPC620:
14189 case PROCESSOR_PPC630:
14190 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14191 ? GET_MODE (XEXP (x, 1)) != DImode
14192 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14193 : (INTVAL (XEXP (x, 1)) >= -256
14194 && INTVAL (XEXP (x, 1)) <= 255)
14195 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14196 return true;
14198 case PROCESSOR_POWER4:
14199 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14200 ? GET_MODE (XEXP (x, 1)) != DImode
14201 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14202 : COSTS_N_INSNS (2));
14203 return true;
14205 default:
14206 abort ();
14209 case DIV:
14210 case MOD:
14211 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14212 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14214 *total = COSTS_N_INSNS (2);
14215 return true;
14217 /* FALLTHRU */
14219 case UDIV:
14220 case UMOD:
14221 switch (rs6000_cpu)
14223 case PROCESSOR_RIOS1:
14224 *total = COSTS_N_INSNS (19);
14225 return true;
14227 case PROCESSOR_RIOS2:
14228 *total = COSTS_N_INSNS (13);
14229 return true;
14231 case PROCESSOR_RS64A:
14232 *total = (GET_MODE (XEXP (x, 1)) != DImode
14233 ? COSTS_N_INSNS (65)
14234 : COSTS_N_INSNS (67));
14235 return true;
14237 case PROCESSOR_MPCCORE:
14238 *total = COSTS_N_INSNS (6);
14239 return true;
14241 case PROCESSOR_PPC403:
14242 *total = COSTS_N_INSNS (33);
14243 return true;
14245 case PROCESSOR_PPC405:
14246 *total = COSTS_N_INSNS (35);
14247 return true;
14249 case PROCESSOR_PPC440:
14250 *total = COSTS_N_INSNS (34);
14251 return true;
14253 case PROCESSOR_PPC601:
14254 *total = COSTS_N_INSNS (36);
14255 return true;
14257 case PROCESSOR_PPC603:
14258 *total = COSTS_N_INSNS (37);
14259 return true;
14261 case PROCESSOR_PPC604:
14262 case PROCESSOR_PPC604e:
14263 *total = COSTS_N_INSNS (20);
14264 return true;
14266 case PROCESSOR_PPC620:
14267 case PROCESSOR_PPC630:
14268 *total = (GET_MODE (XEXP (x, 1)) != DImode
14269 ? COSTS_N_INSNS (21)
14270 : COSTS_N_INSNS (37));
14271 return true;
14273 case PROCESSOR_PPC750:
14274 case PROCESSOR_PPC8540:
14275 case PROCESSOR_PPC7400:
14276 *total = COSTS_N_INSNS (19);
14277 return true;
14279 case PROCESSOR_PPC7450:
14280 *total = COSTS_N_INSNS (23);
14281 return true;
14283 case PROCESSOR_POWER4:
14284 *total = (GET_MODE (XEXP (x, 1)) != DImode
14285 ? COSTS_N_INSNS (18)
14286 : COSTS_N_INSNS (34));
14287 return true;
14289 default:
14290 abort ();
14293 case FFS:
14294 *total = COSTS_N_INSNS (4);
14295 return true;
14297 case MEM:
14298 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14299 *total = 5;
14300 return true;
14302 default:
14303 return false;
14307 /* A C expression returning the cost of moving data from a register of class
14308 CLASS1 to one of CLASS2. */
14311 rs6000_register_move_cost (mode, from, to)
14312 enum machine_mode mode;
14313 enum reg_class from, to;
14315 /* Moves from/to GENERAL_REGS. */
14316 if (reg_classes_intersect_p (to, GENERAL_REGS)
14317 || reg_classes_intersect_p (from, GENERAL_REGS))
14319 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14320 from = to;
14322 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14323 return (rs6000_memory_move_cost (mode, from, 0)
14324 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14326 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14327 else if (from == CR_REGS)
14328 return 4;
14330 else
14331 /* A move will cost one instruction per GPR moved. */
14332 return 2 * HARD_REGNO_NREGS (0, mode);
14335 /* Moving between two similar registers is just one instruction. */
14336 else if (reg_classes_intersect_p (to, from))
14337 return mode == TFmode ? 4 : 2;
14339 /* Everything else has to go through GENERAL_REGS. */
14340 else
14341 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14342 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14345 /* A C expressions returning the cost of moving data of MODE from a register to
14346 or from memory. */
14349 rs6000_memory_move_cost (mode, class, in)
14350 enum machine_mode mode;
14351 enum reg_class class;
14352 int in ATTRIBUTE_UNUSED;
14354 if (reg_classes_intersect_p (class, GENERAL_REGS))
14355 return 4 * HARD_REGNO_NREGS (0, mode);
14356 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14357 return 4 * HARD_REGNO_NREGS (32, mode);
14358 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14359 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14360 else
14361 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14364 /* Define how to find the value returned by a function.
14365 VALTYPE is the data type of the value (as a tree).
14366 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14367 otherwise, FUNC is 0.
14369 On the SPE, both FPs and vectors are returned in r3.
14371 On RS/6000 an integer value is in r3 and a floating-point value is in
14372 fp1, unless -msoft-float. */
14375 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14377 enum machine_mode mode;
14378 unsigned int regno;
14380 if ((INTEGRAL_TYPE_P (valtype)
14381 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14382 || POINTER_TYPE_P (valtype))
14383 mode = word_mode;
14384 else
14385 mode = TYPE_MODE (valtype);
14387 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14388 regno = FP_ARG_RETURN;
14389 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14390 regno = ALTIVEC_ARG_RETURN;
14391 else
14392 regno = GP_ARG_RETURN;
14394 return gen_rtx_REG (mode, regno);
14397 /* Return true if TYPE is of type __ev64_opaque__. */
14399 static bool
14400 is_ev64_opaque_type (type)
14401 tree type;
14403 return (TARGET_SPE
14404 && (type == opaque_V2SI_type_node
14405 || type == opaque_V2SF_type_node
14406 || type == opaque_p_V2SI_type_node
14407 || (TREE_CODE (type) == VECTOR_TYPE
14408 && TYPE_NAME (type)
14409 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14410 && DECL_NAME (TYPE_NAME (type))
14411 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14412 "__ev64_opaque__") == 0)));
14415 static rtx
14416 rs6000_dwarf_register_span (reg)
14417 rtx reg;
14419 unsigned regno;
14421 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14422 return NULL_RTX;
14424 regno = REGNO (reg);
14426 /* The duality of the SPE register size wreaks all kinds of havoc.
14427 This is a way of distinguishing r0 in 32-bits from r0 in
14428 64-bits. */
14429 return
14430 gen_rtx_PARALLEL (VOIDmode,
14431 BYTES_BIG_ENDIAN
14432 ? gen_rtvec (2,
14433 gen_rtx_REG (SImode, regno + 1200),
14434 gen_rtx_REG (SImode, regno))
14435 : gen_rtvec (2,
14436 gen_rtx_REG (SImode, regno),
14437 gen_rtx_REG (SImode, regno + 1200)));
14440 #include "gt-rs6000.h"